]> git.lizzy.rs Git - rust.git/blob - compiler/rustc_hir_typeck/src/writeback.rs
Rollup merge of #105641 - Amanieu:btree_cursor, r=m-ou-se
[rust.git] / compiler / rustc_hir_typeck / src / writeback.rs
1 // Type resolution: the phase that finds all the types in the AST with
2 // unresolved type variables and replaces "ty_var" types with their
3 // substitutions.
4
5 use crate::FnCtxt;
6 use hir::def_id::LocalDefId;
7 use rustc_data_structures::fx::FxHashMap;
8 use rustc_errors::ErrorGuaranteed;
9 use rustc_hir as hir;
10 use rustc_hir::intravisit::{self, Visitor};
11 use rustc_infer::infer::error_reporting::TypeAnnotationNeeded::E0282;
12 use rustc_infer::infer::InferCtxt;
13 use rustc_middle::hir::place::Place as HirPlace;
14 use rustc_middle::mir::FakeReadCause;
15 use rustc_middle::ty::adjustment::{Adjust, Adjustment, PointerCast};
16 use rustc_middle::ty::fold::{TypeFoldable, TypeFolder, TypeSuperFoldable};
17 use rustc_middle::ty::visit::{TypeSuperVisitable, TypeVisitable};
18 use rustc_middle::ty::TypeckResults;
19 use rustc_middle::ty::{self, ClosureSizeProfileData, Ty, TyCtxt};
20 use rustc_span::symbol::sym;
21 use rustc_span::Span;
22
23 use std::mem;
24 use std::ops::ControlFlow;
25
26 ///////////////////////////////////////////////////////////////////////////
27 // Entry point
28
29 // During type inference, partially inferred types are
30 // represented using Type variables (ty::Infer). These don't appear in
31 // the final TypeckResults since all of the types should have been
32 // inferred once typeck is done.
// When type inference is running however, having to update the
// typeck results every time a new type is inferred would be unreasonably slow,
// so instead all of the replacement happens at the end in
// resolve_type_vars_in_body, which creates a new TypeckResults which
// doesn't contain any inference types.
38 impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
39     pub fn resolve_type_vars_in_body(
40         &self,
41         body: &'tcx hir::Body<'tcx>,
42     ) -> &'tcx ty::TypeckResults<'tcx> {
43         let item_def_id = self.tcx.hir().body_owner_def_id(body.id());
44
45         // This attribute causes us to dump some writeback information
46         // in the form of errors, which is used for unit tests.
47         let rustc_dump_user_substs =
48             self.tcx.has_attr(item_def_id.to_def_id(), sym::rustc_dump_user_substs);
49
50         let mut wbcx = WritebackCx::new(self, body, rustc_dump_user_substs);
51         for param in body.params {
52             wbcx.visit_node_id(param.pat.span, param.hir_id);
53         }
54         // Type only exists for constants and statics, not functions.
55         match self.tcx.hir().body_owner_kind(item_def_id) {
56             hir::BodyOwnerKind::Const | hir::BodyOwnerKind::Static(_) => {
57                 let item_hir_id = self.tcx.hir().local_def_id_to_hir_id(item_def_id);
58                 wbcx.visit_node_id(body.value.span, item_hir_id);
59             }
60             hir::BodyOwnerKind::Closure | hir::BodyOwnerKind::Fn => (),
61         }
62         wbcx.visit_body(body);
63         wbcx.visit_min_capture_map();
64         wbcx.eval_closure_size();
65         wbcx.visit_fake_reads_map();
66         wbcx.visit_closures();
67         wbcx.visit_liberated_fn_sigs();
68         wbcx.visit_fru_field_types();
69         wbcx.visit_opaque_types();
70         wbcx.visit_coercion_casts();
71         wbcx.visit_user_provided_tys();
72         wbcx.visit_user_provided_sigs();
73         wbcx.visit_generator_interior_types();
74
75         wbcx.typeck_results.rvalue_scopes =
76             mem::take(&mut self.typeck_results.borrow_mut().rvalue_scopes);
77
78         let used_trait_imports =
79             mem::take(&mut self.typeck_results.borrow_mut().used_trait_imports);
80         debug!("used_trait_imports({:?}) = {:?}", item_def_id, used_trait_imports);
81         wbcx.typeck_results.used_trait_imports = used_trait_imports;
82
83         wbcx.typeck_results.treat_byte_string_as_slice =
84             mem::take(&mut self.typeck_results.borrow_mut().treat_byte_string_as_slice);
85
86         if let Some(e) = self.tainted_by_errors() {
87             wbcx.typeck_results.tainted_by_errors = Some(e);
88         }
89
90         debug!("writeback: typeck results for {:?} are {:#?}", item_def_id, wbcx.typeck_results);
91
92         self.tcx.arena.alloc(wbcx.typeck_results)
93     }
94 }
95
96 ///////////////////////////////////////////////////////////////////////////
97 // The Writeback context. This visitor walks the HIR, checking the
98 // fn-specific typeck results to find references to types or regions. It
99 // resolves those regions to remove inference variables and writes the
100 // final result back into the master typeck results in the tcx. Here and
101 // there, it applies a few ad-hoc checks that were not convenient to
102 // do elsewhere.
103
104 struct WritebackCx<'cx, 'tcx> {
105     fcx: &'cx FnCtxt<'cx, 'tcx>,
106
107     typeck_results: ty::TypeckResults<'tcx>,
108
109     body: &'tcx hir::Body<'tcx>,
110
111     rustc_dump_user_substs: bool,
112 }
113
114 impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
115     fn new(
116         fcx: &'cx FnCtxt<'cx, 'tcx>,
117         body: &'tcx hir::Body<'tcx>,
118         rustc_dump_user_substs: bool,
119     ) -> WritebackCx<'cx, 'tcx> {
120         let owner = body.id().hir_id.owner;
121
122         WritebackCx {
123             fcx,
124             typeck_results: ty::TypeckResults::new(owner),
125             body,
126             rustc_dump_user_substs,
127         }
128     }
129
130     fn tcx(&self) -> TyCtxt<'tcx> {
131         self.fcx.tcx
132     }
133
134     fn write_ty_to_typeck_results(&mut self, hir_id: hir::HirId, ty: Ty<'tcx>) {
135         debug!("write_ty_to_typeck_results({:?}, {:?})", hir_id, ty);
136         assert!(!ty.needs_infer() && !ty.has_placeholders() && !ty.has_free_regions());
137         self.typeck_results.node_types_mut().insert(hir_id, ty);
138     }
139
140     // Hacky hack: During type-checking, we treat *all* operators
141     // as potentially overloaded. But then, during writeback, if
142     // we observe that something like `a+b` is (known to be)
143     // operating on scalars, we clear the overload.
144     fn fix_scalar_builtin_expr(&mut self, e: &hir::Expr<'_>) {
145         match e.kind {
146             hir::ExprKind::Unary(hir::UnOp::Neg | hir::UnOp::Not, inner) => {
147                 let inner_ty = self.fcx.node_ty(inner.hir_id);
148                 let inner_ty = self.fcx.resolve_vars_if_possible(inner_ty);
149
150                 if inner_ty.is_scalar() {
151                     let mut typeck_results = self.fcx.typeck_results.borrow_mut();
152                     typeck_results.type_dependent_defs_mut().remove(e.hir_id);
153                     typeck_results.node_substs_mut().remove(e.hir_id);
154                 }
155             }
156             hir::ExprKind::Binary(ref op, lhs, rhs) | hir::ExprKind::AssignOp(ref op, lhs, rhs) => {
157                 let lhs_ty = self.fcx.node_ty(lhs.hir_id);
158                 let lhs_ty = self.fcx.resolve_vars_if_possible(lhs_ty);
159
160                 let rhs_ty = self.fcx.node_ty(rhs.hir_id);
161                 let rhs_ty = self.fcx.resolve_vars_if_possible(rhs_ty);
162
163                 if lhs_ty.is_scalar() && rhs_ty.is_scalar() {
164                     let mut typeck_results = self.fcx.typeck_results.borrow_mut();
165                     typeck_results.type_dependent_defs_mut().remove(e.hir_id);
166                     typeck_results.node_substs_mut().remove(e.hir_id);
167
168                     match e.kind {
169                         hir::ExprKind::Binary(..) => {
170                             if !op.node.is_by_value() {
171                                 let mut adjustments = typeck_results.adjustments_mut();
172                                 if let Some(a) = adjustments.get_mut(lhs.hir_id) {
173                                     a.pop();
174                                 }
175                                 if let Some(a) = adjustments.get_mut(rhs.hir_id) {
176                                     a.pop();
177                                 }
178                             }
179                         }
180                         hir::ExprKind::AssignOp(..)
181                             if let Some(a) = typeck_results.adjustments_mut().get_mut(lhs.hir_id) =>
182                         {
183                             a.pop();
184                         }
185                         _ => {}
186                     }
187                 }
188             }
189             _ => {}
190         }
191     }
192
193     // (ouz-a 1005988): Normally `[T] : std::ops::Index<usize>` should be normalized
194     // into [T] but currently `Where` clause stops the normalization process for it,
195     // here we compare types of expr and base in a code without `Where` clause they would be equal
196     // if they are not we don't modify the expr, hence we bypass the ICE
197     fn is_builtin_index(
198         &mut self,
199         typeck_results: &TypeckResults<'tcx>,
200         e: &hir::Expr<'_>,
201         base_ty: Ty<'tcx>,
202         index_ty: Ty<'tcx>,
203     ) -> bool {
204         if let Some(elem_ty) = base_ty.builtin_index() {
205             let Some(exp_ty) = typeck_results.expr_ty_opt(e) else {return false;};
206             let resolved_exp_ty = self.resolve(exp_ty, &e.span);
207
208             elem_ty == resolved_exp_ty && index_ty == self.fcx.tcx.types.usize
209         } else {
210             false
211         }
212     }
213
214     // Similar to operators, indexing is always assumed to be overloaded
215     // Here, correct cases where an indexing expression can be simplified
216     // to use builtin indexing because the index type is known to be
217     // usize-ish
218     fn fix_index_builtin_expr(&mut self, e: &hir::Expr<'_>) {
219         if let hir::ExprKind::Index(ref base, ref index) = e.kind {
220             let mut typeck_results = self.fcx.typeck_results.borrow_mut();
221
222             // All valid indexing looks like this; might encounter non-valid indexes at this point.
223             let base_ty = typeck_results
224                 .expr_ty_adjusted_opt(base)
225                 .map(|t| self.fcx.resolve_vars_if_possible(t).kind());
226             if base_ty.is_none() {
227                 // When encountering `return [0][0]` outside of a `fn` body we can encounter a base
228                 // that isn't in the type table. We assume more relevant errors have already been
229                 // emitted, so we delay an ICE if none have. (#64638)
230                 self.tcx().sess.delay_span_bug(e.span, &format!("bad base: `{:?}`", base));
231             }
232             if let Some(ty::Ref(_, base_ty, _)) = base_ty {
233                 let index_ty = typeck_results.expr_ty_adjusted_opt(index).unwrap_or_else(|| {
234                     // When encountering `return [0][0]` outside of a `fn` body we would attempt
235                     // to access an nonexistent index. We assume that more relevant errors will
236                     // already have been emitted, so we only gate on this with an ICE if no
237                     // error has been emitted. (#64638)
238                     self.fcx.tcx.ty_error_with_message(
239                         e.span,
240                         &format!("bad index {:?} for base: `{:?}`", index, base),
241                     )
242                 });
243                 let index_ty = self.fcx.resolve_vars_if_possible(index_ty);
244                 let resolved_base_ty = self.resolve(*base_ty, &base.span);
245
246                 if self.is_builtin_index(&typeck_results, e, resolved_base_ty, index_ty) {
247                     // Remove the method call record
248                     typeck_results.type_dependent_defs_mut().remove(e.hir_id);
249                     typeck_results.node_substs_mut().remove(e.hir_id);
250
251                     if let Some(a) = typeck_results.adjustments_mut().get_mut(base.hir_id) {
252                         // Discard the need for a mutable borrow
253
254                         // Extra adjustment made when indexing causes a drop
255                         // of size information - we need to get rid of it
256                         // Since this is "after" the other adjustment to be
257                         // discarded, we do an extra `pop()`
258                         if let Some(Adjustment {
259                             kind: Adjust::Pointer(PointerCast::Unsize), ..
260                         }) = a.pop()
261                         {
262                             // So the borrow discard actually happens here
263                             a.pop();
264                         }
265                     }
266                 }
267             }
268         }
269     }
270 }
271
272 ///////////////////////////////////////////////////////////////////////////
273 // Impl of Visitor for Resolver
274 //
275 // This is the master code which walks the AST. It delegates most of
276 // the heavy lifting to the generic visit and resolve functions
277 // below. In general, a function is made into a `visitor` if it must
278 // traffic in node-ids or update typeck results in the type context etc.
279
280 impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> {
281     fn visit_expr(&mut self, e: &'tcx hir::Expr<'tcx>) {
282         self.fix_scalar_builtin_expr(e);
283         self.fix_index_builtin_expr(e);
284
285         match e.kind {
286             hir::ExprKind::Closure(&hir::Closure { body, .. }) => {
287                 let body = self.fcx.tcx.hir().body(body);
288                 for param in body.params {
289                     self.visit_node_id(e.span, param.hir_id);
290                 }
291
292                 self.visit_body(body);
293             }
294             hir::ExprKind::Struct(_, fields, _) => {
295                 for field in fields {
296                     self.visit_field_id(field.hir_id);
297                 }
298             }
299             hir::ExprKind::Field(..) => {
300                 self.visit_field_id(e.hir_id);
301             }
302             hir::ExprKind::ConstBlock(anon_const) => {
303                 self.visit_node_id(e.span, anon_const.hir_id);
304
305                 let body = self.tcx().hir().body(anon_const.body);
306                 self.visit_body(body);
307             }
308             _ => {}
309         }
310
311         self.visit_node_id(e.span, e.hir_id);
312         intravisit::walk_expr(self, e);
313     }
314
315     fn visit_generic_param(&mut self, p: &'tcx hir::GenericParam<'tcx>) {
316         match &p.kind {
317             hir::GenericParamKind::Lifetime { .. } => {
318                 // Nothing to write back here
319             }
320             hir::GenericParamKind::Type { .. } | hir::GenericParamKind::Const { .. } => {
321                 self.tcx().sess.delay_span_bug(p.span, format!("unexpected generic param: {p:?}"));
322             }
323         }
324     }
325
326     fn visit_block(&mut self, b: &'tcx hir::Block<'tcx>) {
327         self.visit_node_id(b.span, b.hir_id);
328         intravisit::walk_block(self, b);
329     }
330
331     fn visit_pat(&mut self, p: &'tcx hir::Pat<'tcx>) {
332         match p.kind {
333             hir::PatKind::Binding(..) => {
334                 let typeck_results = self.fcx.typeck_results.borrow();
335                 if let Some(bm) =
336                     typeck_results.extract_binding_mode(self.tcx().sess, p.hir_id, p.span)
337                 {
338                     self.typeck_results.pat_binding_modes_mut().insert(p.hir_id, bm);
339                 }
340             }
341             hir::PatKind::Struct(_, fields, _) => {
342                 for field in fields {
343                     self.visit_field_id(field.hir_id);
344                 }
345             }
346             _ => {}
347         };
348
349         self.visit_pat_adjustments(p.span, p.hir_id);
350
351         self.visit_node_id(p.span, p.hir_id);
352         intravisit::walk_pat(self, p);
353     }
354
355     fn visit_local(&mut self, l: &'tcx hir::Local<'tcx>) {
356         intravisit::walk_local(self, l);
357         let var_ty = self.fcx.local_ty(l.span, l.hir_id).decl_ty;
358         let var_ty = self.resolve(var_ty, &l.span);
359         self.write_ty_to_typeck_results(l.hir_id, var_ty);
360     }
361
362     fn visit_ty(&mut self, hir_ty: &'tcx hir::Ty<'tcx>) {
363         intravisit::walk_ty(self, hir_ty);
364         // If there are type checking errors, Type privacy pass will stop,
365         // so we may not get the type from hid_id, see #104513
366         if let Some(ty) = self.fcx.node_ty_opt(hir_ty.hir_id) {
367             let ty = self.resolve(ty, &hir_ty.span);
368             self.write_ty_to_typeck_results(hir_ty.hir_id, ty);
369         }
370     }
371
372     fn visit_infer(&mut self, inf: &'tcx hir::InferArg) {
373         intravisit::walk_inf(self, inf);
374         // Ignore cases where the inference is a const.
375         if let Some(ty) = self.fcx.node_ty_opt(inf.hir_id) {
376             let ty = self.resolve(ty, &inf.span);
377             self.write_ty_to_typeck_results(inf.hir_id, ty);
378         }
379     }
380 }
381
382 impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
383     fn eval_closure_size(&mut self) {
384         let mut res: FxHashMap<LocalDefId, ClosureSizeProfileData<'tcx>> = Default::default();
385         for (&closure_def_id, data) in self.fcx.typeck_results.borrow().closure_size_eval.iter() {
386             let closure_hir_id = self.tcx().hir().local_def_id_to_hir_id(closure_def_id);
387
388             let data = self.resolve(*data, &closure_hir_id);
389
390             res.insert(closure_def_id, data);
391         }
392
393         self.typeck_results.closure_size_eval = res;
394     }
395     fn visit_min_capture_map(&mut self) {
396         let mut min_captures_wb = ty::MinCaptureInformationMap::with_capacity_and_hasher(
397             self.fcx.typeck_results.borrow().closure_min_captures.len(),
398             Default::default(),
399         );
400         for (&closure_def_id, root_min_captures) in
401             self.fcx.typeck_results.borrow().closure_min_captures.iter()
402         {
403             let mut root_var_map_wb = ty::RootVariableMinCaptureList::with_capacity_and_hasher(
404                 root_min_captures.len(),
405                 Default::default(),
406             );
407             for (var_hir_id, min_list) in root_min_captures.iter() {
408                 let min_list_wb = min_list
409                     .iter()
410                     .map(|captured_place| {
411                         let locatable = captured_place.info.path_expr_id.unwrap_or_else(|| {
412                             self.tcx().hir().local_def_id_to_hir_id(closure_def_id)
413                         });
414
415                         self.resolve(captured_place.clone(), &locatable)
416                     })
417                     .collect();
418                 root_var_map_wb.insert(*var_hir_id, min_list_wb);
419             }
420             min_captures_wb.insert(closure_def_id, root_var_map_wb);
421         }
422
423         self.typeck_results.closure_min_captures = min_captures_wb;
424     }
425
426     fn visit_fake_reads_map(&mut self) {
427         let mut resolved_closure_fake_reads: FxHashMap<
428             LocalDefId,
429             Vec<(HirPlace<'tcx>, FakeReadCause, hir::HirId)>,
430         > = Default::default();
431         for (&closure_def_id, fake_reads) in
432             self.fcx.typeck_results.borrow().closure_fake_reads.iter()
433         {
434             let mut resolved_fake_reads = Vec::<(HirPlace<'tcx>, FakeReadCause, hir::HirId)>::new();
435             for (place, cause, hir_id) in fake_reads.iter() {
436                 let locatable = self.tcx().hir().local_def_id_to_hir_id(closure_def_id);
437
438                 let resolved_fake_read = self.resolve(place.clone(), &locatable);
439                 resolved_fake_reads.push((resolved_fake_read, *cause, *hir_id));
440             }
441             resolved_closure_fake_reads.insert(closure_def_id, resolved_fake_reads);
442         }
443         self.typeck_results.closure_fake_reads = resolved_closure_fake_reads;
444     }
445
446     fn visit_closures(&mut self) {
447         let fcx_typeck_results = self.fcx.typeck_results.borrow();
448         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
449         let common_hir_owner = fcx_typeck_results.hir_owner;
450
451         let fcx_closure_kind_origins =
452             fcx_typeck_results.closure_kind_origins().items_in_stable_order();
453
454         for (local_id, origin) in fcx_closure_kind_origins {
455             let hir_id = hir::HirId { owner: common_hir_owner, local_id };
456             let place_span = origin.0;
457             let place = self.resolve(origin.1.clone(), &place_span);
458             self.typeck_results.closure_kind_origins_mut().insert(hir_id, (place_span, place));
459         }
460     }
461
462     fn visit_coercion_casts(&mut self) {
463         let fcx_typeck_results = self.fcx.typeck_results.borrow();
464
465         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
466
467         let fcx_coercion_casts = fcx_typeck_results.coercion_casts().to_sorted_stable_ord();
468         for local_id in fcx_coercion_casts {
469             self.typeck_results.set_coercion_cast(local_id);
470         }
471     }
472
473     fn visit_user_provided_tys(&mut self) {
474         let fcx_typeck_results = self.fcx.typeck_results.borrow();
475         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
476         let common_hir_owner = fcx_typeck_results.hir_owner;
477
478         if self.rustc_dump_user_substs {
479             let sorted_user_provided_types =
480                 fcx_typeck_results.user_provided_types().items_in_stable_order();
481
482             let mut errors_buffer = Vec::new();
483             for (local_id, c_ty) in sorted_user_provided_types {
484                 let hir_id = hir::HirId { owner: common_hir_owner, local_id };
485
486                 if let ty::UserType::TypeOf(_, user_substs) = c_ty.value {
487                     // This is a unit-testing mechanism.
488                     let span = self.tcx().hir().span(hir_id);
489                     // We need to buffer the errors in order to guarantee a consistent
490                     // order when emitting them.
491                     let err = self
492                         .tcx()
493                         .sess
494                         .struct_span_err(span, &format!("user substs: {:?}", user_substs));
495                     err.buffer(&mut errors_buffer);
496                 }
497             }
498
499             if !errors_buffer.is_empty() {
500                 errors_buffer.sort_by_key(|diag| diag.span.primary_span());
501                 for mut diag in errors_buffer {
502                     self.tcx().sess.diagnostic().emit_diagnostic(&mut diag);
503                 }
504             }
505         }
506
507         self.typeck_results.user_provided_types_mut().extend(
508             fcx_typeck_results.user_provided_types().items().map(|(local_id, c_ty)| {
509                 let hir_id = hir::HirId { owner: common_hir_owner, local_id };
510
511                 if cfg!(debug_assertions) && c_ty.needs_infer() {
512                     span_bug!(
513                         hir_id.to_span(self.fcx.tcx),
514                         "writeback: `{:?}` has inference variables",
515                         c_ty
516                     );
517                 };
518
519                 (hir_id, *c_ty)
520             }),
521         );
522     }
523
524     fn visit_user_provided_sigs(&mut self) {
525         let fcx_typeck_results = self.fcx.typeck_results.borrow();
526         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
527
528         self.typeck_results.user_provided_sigs.extend(
529             fcx_typeck_results.user_provided_sigs.items().map(|(&def_id, c_sig)| {
530                 if cfg!(debug_assertions) && c_sig.needs_infer() {
531                     span_bug!(
532                         self.fcx.tcx.def_span(def_id),
533                         "writeback: `{:?}` has inference variables",
534                         c_sig
535                     );
536                 };
537
538                 (def_id, *c_sig)
539             }),
540         );
541     }
542
543     fn visit_generator_interior_types(&mut self) {
544         let fcx_typeck_results = self.fcx.typeck_results.borrow();
545         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
546         self.typeck_results.generator_interior_types =
547             fcx_typeck_results.generator_interior_types.clone();
548         for (&expr_def_id, predicates) in fcx_typeck_results.generator_interior_predicates.iter() {
549             let predicates = self.resolve(predicates.clone(), &self.fcx.tcx.def_span(expr_def_id));
550             self.typeck_results.generator_interior_predicates.insert(expr_def_id, predicates);
551         }
552     }
553
554     #[instrument(skip(self), level = "debug")]
555     fn visit_opaque_types(&mut self) {
556         let opaque_types = self.fcx.infcx.take_opaque_types();
557         for (opaque_type_key, decl) in opaque_types {
558             let hidden_type = self.resolve(decl.hidden_type, &decl.hidden_type.span);
559             let opaque_type_key = self.resolve(opaque_type_key, &decl.hidden_type.span);
560
561             struct RecursionChecker {
562                 def_id: LocalDefId,
563             }
564             impl<'tcx> ty::TypeVisitor<'tcx> for RecursionChecker {
565                 type BreakTy = ();
566                 fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
567                     if let ty::Alias(ty::Opaque, ty::AliasTy { def_id, .. }) = *t.kind() {
568                         if def_id == self.def_id.to_def_id() {
569                             return ControlFlow::Break(());
570                         }
571                     }
572                     t.super_visit_with(self)
573                 }
574             }
575             if hidden_type
576                 .visit_with(&mut RecursionChecker { def_id: opaque_type_key.def_id })
577                 .is_break()
578             {
579                 continue;
580             }
581
582             let hidden_type = hidden_type.remap_generic_params_to_declaration_params(
583                 opaque_type_key,
584                 self.fcx.infcx.tcx,
585                 true,
586             );
587
588             self.typeck_results.concrete_opaque_types.insert(opaque_type_key.def_id, hidden_type);
589         }
590     }
591
592     fn visit_field_id(&mut self, hir_id: hir::HirId) {
593         if let Some(index) = self.fcx.typeck_results.borrow_mut().field_indices_mut().remove(hir_id)
594         {
595             self.typeck_results.field_indices_mut().insert(hir_id, index);
596         }
597     }
598
599     #[instrument(skip(self, span), level = "debug")]
600     fn visit_node_id(&mut self, span: Span, hir_id: hir::HirId) {
601         // Export associated path extensions and method resolutions.
602         if let Some(def) =
603             self.fcx.typeck_results.borrow_mut().type_dependent_defs_mut().remove(hir_id)
604         {
605             self.typeck_results.type_dependent_defs_mut().insert(hir_id, def);
606         }
607
608         // Resolve any borrowings for the node with id `node_id`
609         self.visit_adjustments(span, hir_id);
610
611         // Resolve the type of the node with id `node_id`
612         let n_ty = self.fcx.node_ty(hir_id);
613         let n_ty = self.resolve(n_ty, &span);
614         self.write_ty_to_typeck_results(hir_id, n_ty);
615         debug!(?n_ty);
616
617         // Resolve any substitutions
618         if let Some(substs) = self.fcx.typeck_results.borrow().node_substs_opt(hir_id) {
619             let substs = self.resolve(substs, &span);
620             debug!("write_substs_to_tcx({:?}, {:?})", hir_id, substs);
621             assert!(!substs.needs_infer() && !substs.has_placeholders());
622             self.typeck_results.node_substs_mut().insert(hir_id, substs);
623         }
624     }
625
626     #[instrument(skip(self, span), level = "debug")]
627     fn visit_adjustments(&mut self, span: Span, hir_id: hir::HirId) {
628         let adjustment = self.fcx.typeck_results.borrow_mut().adjustments_mut().remove(hir_id);
629         match adjustment {
630             None => {
631                 debug!("no adjustments for node");
632             }
633
634             Some(adjustment) => {
635                 let resolved_adjustment = self.resolve(adjustment, &span);
636                 debug!(?resolved_adjustment);
637                 self.typeck_results.adjustments_mut().insert(hir_id, resolved_adjustment);
638             }
639         }
640     }
641
642     #[instrument(skip(self, span), level = "debug")]
643     fn visit_pat_adjustments(&mut self, span: Span, hir_id: hir::HirId) {
644         let adjustment = self.fcx.typeck_results.borrow_mut().pat_adjustments_mut().remove(hir_id);
645         match adjustment {
646             None => {
647                 debug!("no pat_adjustments for node");
648             }
649
650             Some(adjustment) => {
651                 let resolved_adjustment = self.resolve(adjustment, &span);
652                 debug!(?resolved_adjustment);
653                 self.typeck_results.pat_adjustments_mut().insert(hir_id, resolved_adjustment);
654             }
655         }
656     }
657
658     fn visit_liberated_fn_sigs(&mut self) {
659         let fcx_typeck_results = self.fcx.typeck_results.borrow();
660         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
661         let common_hir_owner = fcx_typeck_results.hir_owner;
662
663         let fcx_liberated_fn_sigs = fcx_typeck_results.liberated_fn_sigs().items_in_stable_order();
664
665         for (local_id, &fn_sig) in fcx_liberated_fn_sigs {
666             let hir_id = hir::HirId { owner: common_hir_owner, local_id };
667             let fn_sig = self.resolve(fn_sig, &hir_id);
668             self.typeck_results.liberated_fn_sigs_mut().insert(hir_id, fn_sig);
669         }
670     }
671
672     fn visit_fru_field_types(&mut self) {
673         let fcx_typeck_results = self.fcx.typeck_results.borrow();
674         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
675         let common_hir_owner = fcx_typeck_results.hir_owner;
676
677         let fcx_fru_field_types = fcx_typeck_results.fru_field_types().items_in_stable_order();
678
679         for (local_id, ftys) in fcx_fru_field_types {
680             let hir_id = hir::HirId { owner: common_hir_owner, local_id };
681             let ftys = self.resolve(ftys.clone(), &hir_id);
682             self.typeck_results.fru_field_types_mut().insert(hir_id, ftys);
683         }
684     }
685
686     fn resolve<T>(&mut self, x: T, span: &dyn Locatable) -> T
687     where
688         T: TypeFoldable<'tcx>,
689     {
690         let mut resolver = Resolver::new(self.fcx, span, self.body);
691         let x = x.fold_with(&mut resolver);
692         if cfg!(debug_assertions) && x.needs_infer() {
693             span_bug!(span.to_span(self.fcx.tcx), "writeback: `{:?}` has inference variables", x);
694         }
695
696         // We may have introduced e.g. `ty::Error`, if inference failed, make sure
697         // to mark the `TypeckResults` as tainted in that case, so that downstream
698         // users of the typeck results don't produce extra errors, or worse, ICEs.
699         if let Some(e) = resolver.replaced_with_error {
700             self.typeck_results.tainted_by_errors = Some(e);
701         }
702
703         x
704     }
705 }
706
/// Anything that can produce a `Span` for diagnostics given a `TyCtxt`
/// (implemented below for `Span` itself and for `hir::HirId`).
pub(crate) trait Locatable {
    fn to_span(&self, tcx: TyCtxt<'_>) -> Span;
}
710
711 impl Locatable for Span {
712     fn to_span(&self, _: TyCtxt<'_>) -> Span {
713         *self
714     }
715 }
716
717 impl Locatable for hir::HirId {
718     fn to_span(&self, tcx: TyCtxt<'_>) -> Span {
719         tcx.hir().span(*self)
720     }
721 }
722
/// The Resolver. This is the type folding engine that detects
/// unresolved types and so forth.
struct Resolver<'cx, 'tcx> {
    /// Handle to the global type context.
    tcx: TyCtxt<'tcx>,
    /// The inference context whose variables are being resolved.
    infcx: &'cx InferCtxt<'tcx>,
    /// Where diagnostics point if a variable cannot be resolved.
    span: &'cx dyn Locatable,
    /// The body being written back; its id is attached to inference-failure errors.
    body: &'tcx hir::Body<'tcx>,

    /// Set to `Some` if any `Ty` or `ty::Const` had to be replaced with an `Error`.
    replaced_with_error: Option<ErrorGuaranteed>,
}
734
735 impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
736     fn new(
737         fcx: &'cx FnCtxt<'cx, 'tcx>,
738         span: &'cx dyn Locatable,
739         body: &'tcx hir::Body<'tcx>,
740     ) -> Resolver<'cx, 'tcx> {
741         Resolver { tcx: fcx.tcx, infcx: fcx, span, body, replaced_with_error: None }
742     }
743
744     fn report_error(&self, p: impl Into<ty::GenericArg<'tcx>>) -> ErrorGuaranteed {
745         match self.tcx.sess.has_errors() {
746             Some(e) => e,
747             None => self
748                 .infcx
749                 .err_ctxt()
750                 .emit_inference_failure_err(
751                     Some(self.body.id()),
752                     self.span.to_span(self.tcx),
753                     p.into(),
754                     E0282,
755                     false,
756                 )
757                 .emit(),
758         }
759     }
760 }
761
/// Folder that erases every region that is *not* late-bound (see the
/// `TypeFolder` impl below); late-bound regions keep their names.
struct EraseEarlyRegions<'tcx> {
    tcx: TyCtxt<'tcx>,
}
765
766 impl<'tcx> TypeFolder<'tcx> for EraseEarlyRegions<'tcx> {
767     fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
768         self.tcx
769     }
770     fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
771         if ty.has_type_flags(ty::TypeFlags::HAS_FREE_REGIONS) {
772             ty.super_fold_with(self)
773         } else {
774             ty
775         }
776     }
777     fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
778         if r.is_late_bound() { r } else { self.tcx.lifetimes.re_erased }
779     }
780 }
781
782 impl<'cx, 'tcx> TypeFolder<'tcx> for Resolver<'cx, 'tcx> {
783     fn tcx<'a>(&'a self) -> TyCtxt<'tcx> {
784         self.tcx
785     }
786
787     fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
788         match self.infcx.fully_resolve(t) {
789             Ok(t) => {
790                 // Do not anonymize late-bound regions
791                 // (e.g. keep `for<'a>` named `for<'a>`).
792                 // This allows NLL to generate error messages that
793                 // refer to the higher-ranked lifetime names written by the user.
794                 EraseEarlyRegions { tcx: self.tcx }.fold_ty(t)
795             }
796             Err(_) => {
797                 debug!("Resolver::fold_ty: input type `{:?}` not fully resolvable", t);
798                 let e = self.report_error(t);
799                 self.replaced_with_error = Some(e);
800                 self.tcx().ty_error_with_guaranteed(e)
801             }
802         }
803     }
804
805     fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
806         debug_assert!(!r.is_late_bound(), "Should not be resolving bound region.");
807         self.tcx.lifetimes.re_erased
808     }
809
810     fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> {
811         match self.infcx.fully_resolve(ct) {
812             Ok(ct) => self.tcx.erase_regions(ct),
813             Err(_) => {
814                 debug!("Resolver::fold_const: input const `{:?}` not fully resolvable", ct);
815                 let e = self.report_error(ct);
816                 self.replaced_with_error = Some(e);
817                 self.tcx().const_error_with_guaranteed(ct.ty(), e)
818             }
819         }
820     }
821 }
822
823 ///////////////////////////////////////////////////////////////////////////
824 // During type check, we store promises with the result of trait
825 // lookup rather than the actual results (because the results are not
826 // necessarily available immediately). These routines unwind the
827 // promises. It is expected that we will have already reported any
828 // errors that may be encountered, so if the promises store an error,
829 // a dummy result is returned.