]> git.lizzy.rs Git - rust.git/blob - compiler/rustc_typeck/src/check/writeback.rs
Rollup merge of #100384 - ridwanabdillahi:instr_profile_output, r=wesleywiser
[rust.git] / compiler / rustc_typeck / src / check / writeback.rs
1 // Type resolution: the phase that finds all the types in the AST with
2 // unresolved type variables and replaces "ty_var" types with their
3 // substitutions.
4
5 use crate::check::FnCtxt;
6
7 use hir::def_id::LocalDefId;
8 use rustc_data_structures::fx::FxHashMap;
9 use rustc_errors::ErrorGuaranteed;
10 use rustc_hir as hir;
11 use rustc_hir::intravisit::{self, Visitor};
12 use rustc_infer::infer::error_reporting::TypeAnnotationNeeded::E0282;
13 use rustc_infer::infer::InferCtxt;
14 use rustc_middle::hir::place::Place as HirPlace;
15 use rustc_middle::mir::FakeReadCause;
16 use rustc_middle::ty::adjustment::{Adjust, Adjustment, PointerCast};
17 use rustc_middle::ty::fold::{TypeFoldable, TypeFolder, TypeSuperFoldable};
18 use rustc_middle::ty::visit::{TypeSuperVisitable, TypeVisitable};
19 use rustc_middle::ty::{self, ClosureSizeProfileData, Ty, TyCtxt};
20 use rustc_span::symbol::sym;
21 use rustc_span::Span;
22
23 use std::mem;
24 use std::ops::ControlFlow;
25
26 ///////////////////////////////////////////////////////////////////////////
27 // Entry point
28
29 // During type inference, partially inferred types are
30 // represented using Type variables (ty::Infer). These don't appear in
31 // the final TypeckResults since all of the types should have been
32 // inferred once typeck is done.
// When type inference is running, however, having to update the
// typeck results every time a new type is inferred would be unreasonably
// slow, so instead all of the replacement happens at the end in
// resolve_type_vars_in_body, which creates a new TypeckResults which
// doesn't contain any inference types.
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
    /// Writeback entry point: walks `body`, replaces every inference
    /// variable recorded during type checking with its resolved form, and
    /// returns a fresh, inference-free `TypeckResults` allocated in the
    /// tcx arena.
    pub fn resolve_type_vars_in_body(
        &self,
        body: &'tcx hir::Body<'tcx>,
    ) -> &'tcx ty::TypeckResults<'tcx> {
        let item_id = self.tcx.hir().body_owner(body.id());
        let item_def_id = self.tcx.hir().local_def_id(item_id);

        // This attribute causes us to dump some writeback information
        // in the form of errors, which is used for unit tests.
        let rustc_dump_user_substs =
            self.tcx.has_attr(item_def_id.to_def_id(), sym::rustc_dump_user_substs);

        let mut wbcx = WritebackCx::new(self, body, rustc_dump_user_substs);
        // Resolve the types of the body parameters first.
        for param in body.params {
            wbcx.visit_node_id(param.pat.span, param.hir_id);
        }
        // Type only exists for constants and statics, not functions.
        match self.tcx.hir().body_owner_kind(item_def_id) {
            hir::BodyOwnerKind::Const | hir::BodyOwnerKind::Static(_) => {
                wbcx.visit_node_id(body.value.span, item_id);
            }
            hir::BodyOwnerKind::Closure | hir::BodyOwnerKind::Fn => (),
        }
        // Walk the whole body, then copy over (resolving as we go) each of
        // the side tables accumulated in the in-progress typeck results.
        wbcx.visit_body(body);
        wbcx.visit_min_capture_map();
        wbcx.eval_closure_size();
        wbcx.visit_fake_reads_map();
        wbcx.visit_closures();
        wbcx.visit_liberated_fn_sigs();
        wbcx.visit_fru_field_types();
        wbcx.visit_opaque_types();
        wbcx.visit_coercion_casts();
        wbcx.visit_user_provided_tys();
        wbcx.visit_user_provided_sigs();
        wbcx.visit_generator_interior_types();

        // These tables need no per-entry resolution; move them wholesale
        // out of the in-progress results.
        wbcx.typeck_results.rvalue_scopes =
            mem::take(&mut self.typeck_results.borrow_mut().rvalue_scopes);

        let used_trait_imports =
            mem::take(&mut self.typeck_results.borrow_mut().used_trait_imports);
        debug!("used_trait_imports({:?}) = {:?}", item_def_id, used_trait_imports);
        wbcx.typeck_results.used_trait_imports = used_trait_imports;

        wbcx.typeck_results.treat_byte_string_as_slice =
            mem::take(&mut self.typeck_results.borrow_mut().treat_byte_string_as_slice);

        // Propagate error taint so downstream consumers don't emit
        // follow-on errors for a body that already failed to check.
        if self.is_tainted_by_errors() {
            // FIXME(eddyb) keep track of `ErrorGuaranteed` from where the error was emitted.
            wbcx.typeck_results.tainted_by_errors =
                Some(ErrorGuaranteed::unchecked_claim_error_was_emitted());
        }

        debug!("writeback: typeck results for {:?} are {:#?}", item_def_id, wbcx.typeck_results);

        self.tcx.arena.alloc(wbcx.typeck_results)
    }
}
97
98 ///////////////////////////////////////////////////////////////////////////
99 // The Writeback context. This visitor walks the HIR, checking the
100 // fn-specific typeck results to find references to types or regions. It
101 // resolves those regions to remove inference variables and writes the
102 // final result back into the master typeck results in the tcx. Here and
103 // there, it applies a few ad-hoc checks that were not convenient to
104 // do elsewhere.
105
struct WritebackCx<'cx, 'tcx> {
    // The function-typechecking context whose in-progress results are
    // being written back.
    fcx: &'cx FnCtxt<'cx, 'tcx>,

    // The new, inference-free results being accumulated.
    typeck_results: ty::TypeckResults<'tcx>,

    // The body being written back (used for diagnostics in `resolve`).
    body: &'tcx hir::Body<'tcx>,

    // Whether `#[rustc_dump_user_substs]` is on the item (unit-test dumping).
    rustc_dump_user_substs: bool,
}
115
impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
    /// Creates a writeback context with an empty `TypeckResults` owned by
    /// the same HIR owner as `body`.
    fn new(
        fcx: &'cx FnCtxt<'cx, 'tcx>,
        body: &'tcx hir::Body<'tcx>,
        rustc_dump_user_substs: bool,
    ) -> WritebackCx<'cx, 'tcx> {
        let owner = body.id().hir_id.owner;

        WritebackCx {
            fcx,
            typeck_results: ty::TypeckResults::new(owner),
            body,
            rustc_dump_user_substs,
        }
    }

    fn tcx(&self) -> TyCtxt<'tcx> {
        self.fcx.tcx
    }

    /// Records a fully resolved type for `hir_id`; asserts that no
    /// inference variables, placeholders, or free regions remain.
    fn write_ty_to_typeck_results(&mut self, hir_id: hir::HirId, ty: Ty<'tcx>) {
        debug!("write_ty_to_typeck_results({:?}, {:?})", hir_id, ty);
        assert!(!ty.needs_infer() && !ty.has_placeholders() && !ty.has_free_regions());
        self.typeck_results.node_types_mut().insert(hir_id, ty);
    }

    // Hacky hack: During type-checking, we treat *all* operators
    // as potentially overloaded. But then, during writeback, if
    // we observe that something like `a+b` is (known to be)
    // operating on scalars, we clear the overload.
    fn fix_scalar_builtin_expr(&mut self, e: &hir::Expr<'_>) {
        match e.kind {
            hir::ExprKind::Unary(hir::UnOp::Neg | hir::UnOp::Not, inner) => {
                let inner_ty = self.fcx.node_ty(inner.hir_id);
                let inner_ty = self.fcx.resolve_vars_if_possible(inner_ty);

                if inner_ty.is_scalar() {
                    // Drop the method-call record for the operator.
                    let mut typeck_results = self.fcx.typeck_results.borrow_mut();
                    typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    typeck_results.node_substs_mut().remove(e.hir_id);
                }
            }
            hir::ExprKind::Binary(ref op, lhs, rhs) | hir::ExprKind::AssignOp(ref op, lhs, rhs) => {
                let lhs_ty = self.fcx.node_ty(lhs.hir_id);
                let lhs_ty = self.fcx.resolve_vars_if_possible(lhs_ty);

                let rhs_ty = self.fcx.node_ty(rhs.hir_id);
                let rhs_ty = self.fcx.resolve_vars_if_possible(rhs_ty);

                if lhs_ty.is_scalar() && rhs_ty.is_scalar() {
                    let mut typeck_results = self.fcx.typeck_results.borrow_mut();
                    typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    typeck_results.node_substs_mut().remove(e.hir_id);

                    match e.kind {
                        hir::ExprKind::Binary(..) => {
                            // By-ref binops take their operands by reference
                            // during type check; pop that autoref adjustment
                            // now that the operation is known to be builtin.
                            if !op.node.is_by_value() {
                                let mut adjustments = typeck_results.adjustments_mut();
                                if let Some(a) = adjustments.get_mut(lhs.hir_id) {
                                    a.pop();
                                }
                                if let Some(a) = adjustments.get_mut(rhs.hir_id) {
                                    a.pop();
                                }
                            }
                        }
                        // Compound assignment always autorefs its LHS; pop
                        // that adjustment too.
                        hir::ExprKind::AssignOp(..)
                            if let Some(a) = typeck_results.adjustments_mut().get_mut(lhs.hir_id) =>
                        {
                            a.pop();
                        }
                        _ => {}
                    }
                }
            }
            _ => {}
        }
    }

    // Similar to operators, indexing is always assumed to be overloaded
    // Here, correct cases where an indexing expression can be simplified
    // to use builtin indexing because the index type is known to be
    // usize-ish
    fn fix_index_builtin_expr(&mut self, e: &hir::Expr<'_>) {
        if let hir::ExprKind::Index(ref base, ref index) = e.kind {
            let mut typeck_results = self.fcx.typeck_results.borrow_mut();

            // All valid indexing looks like this; might encounter non-valid indexes at this point.
            let base_ty = typeck_results
                .expr_ty_adjusted_opt(base)
                .map(|t| self.fcx.resolve_vars_if_possible(t).kind());
            if base_ty.is_none() {
                // When encountering `return [0][0]` outside of a `fn` body we can encounter a base
                // that isn't in the type table. We assume more relevant errors have already been
                // emitted, so we delay an ICE if none have. (#64638)
                self.tcx().sess.delay_span_bug(e.span, &format!("bad base: `{:?}`", base));
            }
            if let Some(ty::Ref(_, base_ty, _)) = base_ty {
                let index_ty = typeck_results.expr_ty_adjusted_opt(index).unwrap_or_else(|| {
                    // When encountering `return [0][0]` outside of a `fn` body we would attempt
                    // to access an nonexistent index. We assume that more relevant errors will
                    // already have been emitted, so we only gate on this with an ICE if no
                    // error has been emitted. (#64638)
                    self.fcx.tcx.ty_error_with_message(
                        e.span,
                        &format!("bad index {:?} for base: `{:?}`", index, base),
                    )
                });
                let index_ty = self.fcx.resolve_vars_if_possible(index_ty);

                if base_ty.builtin_index().is_some() && index_ty == self.fcx.tcx.types.usize {
                    // Remove the method call record
                    typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    typeck_results.node_substs_mut().remove(e.hir_id);

                    if let Some(a) = typeck_results.adjustments_mut().get_mut(base.hir_id) {
                        // Discard the need for a mutable borrow

                        // Extra adjustment made when indexing causes a drop
                        // of size information - we need to get rid of it
                        // Since this is "after" the other adjustment to be
                        // discarded, we do an extra `pop()`
                        if let Some(Adjustment {
                            kind: Adjust::Pointer(PointerCast::Unsize), ..
                        }) = a.pop()
                        {
                            // So the borrow discard actually happens here
                            a.pop();
                        }
                    }
                }
            }
        }
    }
}
251
252 ///////////////////////////////////////////////////////////////////////////
253 // Impl of Visitor for Resolver
254 //
255 // This is the master code which walks the AST. It delegates most of
256 // the heavy lifting to the generic visit and resolve functions
257 // below. In general, a function is made into a `visitor` if it must
258 // traffic in node-ids or update typeck results in the type context etc.
259
impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> {
    fn visit_expr(&mut self, e: &'tcx hir::Expr<'tcx>) {
        // Demote operators/indexing that turned out to be builtin before
        // resolving this node's type and substs.
        self.fix_scalar_builtin_expr(e);
        self.fix_index_builtin_expr(e);

        match e.kind {
            hir::ExprKind::Closure(&hir::Closure { body, .. }) => {
                // Nested closure bodies are written back as part of the
                // enclosing body's writeback.
                let body = self.fcx.tcx.hir().body(body);
                for param in body.params {
                    self.visit_node_id(e.span, param.hir_id);
                }

                self.visit_body(body);
            }
            hir::ExprKind::Struct(_, fields, _) => {
                for field in fields {
                    self.visit_field_id(field.hir_id);
                }
            }
            hir::ExprKind::Field(..) => {
                self.visit_field_id(e.hir_id);
            }
            hir::ExprKind::ConstBlock(anon_const) => {
                self.visit_node_id(e.span, anon_const.hir_id);

                let body = self.tcx().hir().body(anon_const.body);
                self.visit_body(body);
            }
            _ => {}
        }

        self.visit_node_id(e.span, e.hir_id);
        intravisit::walk_expr(self, e);
    }

    fn visit_generic_param(&mut self, p: &'tcx hir::GenericParam<'tcx>) {
        match &p.kind {
            hir::GenericParamKind::Lifetime { .. } => {
                // Nothing to write back here
            }
            hir::GenericParamKind::Type { .. } | hir::GenericParamKind::Const { .. } => {
                self.tcx().sess.delay_span_bug(p.span, format!("unexpected generic param: {p:?}"));
            }
        }
    }

    fn visit_block(&mut self, b: &'tcx hir::Block<'tcx>) {
        self.visit_node_id(b.span, b.hir_id);
        intravisit::walk_block(self, b);
    }

    fn visit_pat(&mut self, p: &'tcx hir::Pat<'tcx>) {
        match p.kind {
            hir::PatKind::Binding(..) => {
                // Copy the resolved binding mode (by-value / by-ref) for
                // this binding, if one was recorded.
                let typeck_results = self.fcx.typeck_results.borrow();
                if let Some(bm) =
                    typeck_results.extract_binding_mode(self.tcx().sess, p.hir_id, p.span)
                {
                    self.typeck_results.pat_binding_modes_mut().insert(p.hir_id, bm);
                }
            }
            hir::PatKind::Struct(_, fields, _) => {
                for field in fields {
                    self.visit_field_id(field.hir_id);
                }
            }
            _ => {}
        };

        self.visit_pat_adjustments(p.span, p.hir_id);

        self.visit_node_id(p.span, p.hir_id);
        intravisit::walk_pat(self, p);
    }

    fn visit_local(&mut self, l: &'tcx hir::Local<'tcx>) {
        // Walk children first so the declared type is resolved last.
        intravisit::walk_local(self, l);
        let var_ty = self.fcx.local_ty(l.span, l.hir_id).decl_ty;
        let var_ty = self.resolve(var_ty, &l.span);
        self.write_ty_to_typeck_results(l.hir_id, var_ty);
    }

    fn visit_ty(&mut self, hir_ty: &'tcx hir::Ty<'tcx>) {
        intravisit::walk_ty(self, hir_ty);
        let ty = self.fcx.node_ty(hir_ty.hir_id);
        let ty = self.resolve(ty, &hir_ty.span);
        self.write_ty_to_typeck_results(hir_ty.hir_id, ty);
    }

    fn visit_infer(&mut self, inf: &'tcx hir::InferArg) {
        intravisit::walk_inf(self, inf);
        // Ignore cases where the inference is a const.
        if let Some(ty) = self.fcx.node_ty_opt(inf.hir_id) {
            let ty = self.resolve(ty, &inf.span);
            self.write_ty_to_typeck_results(inf.hir_id, ty);
        }
    }
}
358
impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
    /// Resolves and copies over the closure-size profiling data.
    fn eval_closure_size(&mut self) {
        let mut res: FxHashMap<LocalDefId, ClosureSizeProfileData<'tcx>> = Default::default();
        for (&closure_def_id, data) in self.fcx.typeck_results.borrow().closure_size_eval.iter() {
            let closure_hir_id = self.tcx().hir().local_def_id_to_hir_id(closure_def_id);

            let data = self.resolve(*data, &closure_hir_id);

            res.insert(closure_def_id, data);
        }

        self.typeck_results.closure_size_eval = res;
    }

    /// Resolves and copies over the per-closure minimal-capture analysis.
    fn visit_min_capture_map(&mut self) {
        let mut min_captures_wb = ty::MinCaptureInformationMap::with_capacity_and_hasher(
            self.fcx.typeck_results.borrow().closure_min_captures.len(),
            Default::default(),
        );
        for (&closure_def_id, root_min_captures) in
            self.fcx.typeck_results.borrow().closure_min_captures.iter()
        {
            let mut root_var_map_wb = ty::RootVariableMinCaptureList::with_capacity_and_hasher(
                root_min_captures.len(),
                Default::default(),
            );
            for (var_hir_id, min_list) in root_min_captures.iter() {
                let min_list_wb = min_list
                    .iter()
                    .map(|captured_place| {
                        // Fall back to the closure itself for diagnostics when
                        // the capture has no path expression of its own.
                        let locatable = captured_place.info.path_expr_id.unwrap_or_else(|| {
                            self.tcx().hir().local_def_id_to_hir_id(closure_def_id)
                        });

                        self.resolve(captured_place.clone(), &locatable)
                    })
                    .collect();
                root_var_map_wb.insert(*var_hir_id, min_list_wb);
            }
            min_captures_wb.insert(closure_def_id, root_var_map_wb);
        }

        self.typeck_results.closure_min_captures = min_captures_wb;
    }

    /// Resolves and copies over the fake reads recorded for closures.
    fn visit_fake_reads_map(&mut self) {
        let mut resolved_closure_fake_reads: FxHashMap<
            LocalDefId,
            Vec<(HirPlace<'tcx>, FakeReadCause, hir::HirId)>,
        > = Default::default();
        for (&closure_def_id, fake_reads) in
            self.fcx.typeck_results.borrow().closure_fake_reads.iter()
        {
            let mut resolved_fake_reads = Vec::<(HirPlace<'tcx>, FakeReadCause, hir::HirId)>::new();
            for (place, cause, hir_id) in fake_reads.iter() {
                let locatable = self.tcx().hir().local_def_id_to_hir_id(closure_def_id);

                let resolved_fake_read = self.resolve(place.clone(), &locatable);
                resolved_fake_reads.push((resolved_fake_read, *cause, *hir_id));
            }
            resolved_closure_fake_reads.insert(closure_def_id, resolved_fake_reads);
        }
        self.typeck_results.closure_fake_reads = resolved_closure_fake_reads;
    }

    /// Resolves and copies over closure-kind origin info (what forced a
    /// closure to be `FnMut`/`FnOnce`).
    fn visit_closures(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        for (id, origin) in fcx_typeck_results.closure_kind_origins().iter() {
            let hir_id = hir::HirId { owner: common_hir_owner, local_id: *id };
            let place_span = origin.0;
            let place = self.resolve(origin.1.clone(), &place_span);
            self.typeck_results.closure_kind_origins_mut().insert(hir_id, (place_span, place));
        }
    }

    /// Copies over the set of casts that were found to be coercions.
    fn visit_coercion_casts(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        let fcx_coercion_casts = fcx_typeck_results.coercion_casts();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);

        for local_id in fcx_coercion_casts {
            self.typeck_results.set_coercion_cast(*local_id);
        }
    }

    /// Copies over user-written type annotations; also implements the
    /// `#[rustc_dump_user_substs]` unit-test dumping.
    fn visit_user_provided_tys(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        let mut errors_buffer = Vec::new();
        for (&local_id, c_ty) in fcx_typeck_results.user_provided_types().iter() {
            let hir_id = hir::HirId { owner: common_hir_owner, local_id };

            // User-provided types should never contain inference variables;
            // check this only in debug builds since it's a pure sanity check.
            if cfg!(debug_assertions) && c_ty.needs_infer() {
                span_bug!(
                    hir_id.to_span(self.fcx.tcx),
                    "writeback: `{:?}` has inference variables",
                    c_ty
                );
            };

            self.typeck_results.user_provided_types_mut().insert(hir_id, *c_ty);

            if let ty::UserType::TypeOf(_, user_substs) = c_ty.value {
                if self.rustc_dump_user_substs {
                    // This is a unit-testing mechanism.
                    let span = self.tcx().hir().span(hir_id);
                    // We need to buffer the errors in order to guarantee a consistent
                    // order when emitting them.
                    let err = self
                        .tcx()
                        .sess
                        .struct_span_err(span, &format!("user substs: {:?}", user_substs));
                    err.buffer(&mut errors_buffer);
                }
            }
        }

        if !errors_buffer.is_empty() {
            errors_buffer.sort_by_key(|diag| diag.span.primary_span());
            for mut diag in errors_buffer.drain(..) {
                self.tcx().sess.diagnostic().emit_diagnostic(&mut diag);
            }
        }
    }

    /// Copies over user-written function signatures (e.g. on closures).
    fn visit_user_provided_sigs(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);

        for (&def_id, c_sig) in fcx_typeck_results.user_provided_sigs.iter() {
            if cfg!(debug_assertions) && c_sig.needs_infer() {
                span_bug!(
                    self.fcx.tcx.hir().span_if_local(def_id).unwrap(),
                    "writeback: `{:?}` has inference variables",
                    c_sig
                );
            };

            self.typeck_results.user_provided_sigs.insert(def_id, *c_sig);
        }
    }

    /// Copies over the recorded generator interior types verbatim.
    fn visit_generator_interior_types(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        self.typeck_results.generator_interior_types =
            fcx_typeck_results.generator_interior_types.clone();
    }

    /// Takes the opaque types inferred for this body and records their
    /// (resolved) hidden types, rejecting directly-recursive definitions.
    #[instrument(skip(self), level = "debug")]
    fn visit_opaque_types(&mut self) {
        let opaque_types =
            self.fcx.infcx.inner.borrow_mut().opaque_type_storage.take_opaque_types();
        for (opaque_type_key, decl) in opaque_types {
            let hidden_type = match decl.origin {
                hir::OpaqueTyOrigin::FnReturn(_) | hir::OpaqueTyOrigin::AsyncFn(_) => {
                    let ty = self.resolve(decl.hidden_type.ty, &decl.hidden_type.span);
                    // Detects a hidden type that mentions the opaque type
                    // it is supposed to define (infinite recursion).
                    struct RecursionChecker {
                        def_id: LocalDefId,
                    }
                    impl<'tcx> ty::TypeVisitor<'tcx> for RecursionChecker {
                        type BreakTy = ();
                        fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                            if let ty::Opaque(def_id, _) = *t.kind() {
                                if def_id == self.def_id.to_def_id() {
                                    return ControlFlow::Break(());
                                }
                            }
                            t.super_visit_with(self)
                        }
                    }
                    if ty
                        .visit_with(&mut RecursionChecker { def_id: opaque_type_key.def_id })
                        .is_break()
                    {
                        // NOTE(review): this `return` aborts writeback of ALL
                        // remaining opaque types, not just the recursive one —
                        // a `continue` would skip only this entry. Verify this
                        // is intentional before changing it.
                        return;
                    }
                    Some(ty)
                }
                hir::OpaqueTyOrigin::TyAlias => None,
            };
            self.typeck_results.concrete_opaque_types.insert(opaque_type_key.def_id, hidden_type);
        }
    }

    /// Moves over the resolved field index for a field access, if any.
    fn visit_field_id(&mut self, hir_id: hir::HirId) {
        if let Some(index) = self.fcx.typeck_results.borrow_mut().field_indices_mut().remove(hir_id)
        {
            self.typeck_results.field_indices_mut().insert(hir_id, index);
        }
    }

    /// Resolves and writes back everything recorded for a single HIR node:
    /// type-dependent defs, adjustments, node type, and substitutions.
    #[instrument(skip(self, span), level = "debug")]
    fn visit_node_id(&mut self, span: Span, hir_id: hir::HirId) {
        // Export associated path extensions and method resolutions.
        if let Some(def) =
            self.fcx.typeck_results.borrow_mut().type_dependent_defs_mut().remove(hir_id)
        {
            self.typeck_results.type_dependent_defs_mut().insert(hir_id, def);
        }

        // Resolve any borrowings for the node with id `node_id`
        self.visit_adjustments(span, hir_id);

        // Resolve the type of the node with id `node_id`
        let n_ty = self.fcx.node_ty(hir_id);
        let n_ty = self.resolve(n_ty, &span);
        self.write_ty_to_typeck_results(hir_id, n_ty);
        debug!(?n_ty);

        // Resolve any substitutions
        if let Some(substs) = self.fcx.typeck_results.borrow().node_substs_opt(hir_id) {
            let substs = self.resolve(substs, &span);
            debug!("write_substs_to_tcx({:?}, {:?})", hir_id, substs);
            assert!(!substs.needs_infer() && !substs.has_placeholders());
            self.typeck_results.node_substs_mut().insert(hir_id, substs);
        }
    }

    /// Resolves and moves over the expression adjustments for `hir_id`.
    #[instrument(skip(self, span), level = "debug")]
    fn visit_adjustments(&mut self, span: Span, hir_id: hir::HirId) {
        let adjustment = self.fcx.typeck_results.borrow_mut().adjustments_mut().remove(hir_id);
        match adjustment {
            None => {
                debug!("no adjustments for node");
            }

            Some(adjustment) => {
                let resolved_adjustment = self.resolve(adjustment, &span);
                debug!(?resolved_adjustment);
                self.typeck_results.adjustments_mut().insert(hir_id, resolved_adjustment);
            }
        }
    }

    /// Resolves and moves over the pattern adjustments for `hir_id`.
    #[instrument(skip(self, span), level = "debug")]
    fn visit_pat_adjustments(&mut self, span: Span, hir_id: hir::HirId) {
        let adjustment = self.fcx.typeck_results.borrow_mut().pat_adjustments_mut().remove(hir_id);
        match adjustment {
            None => {
                debug!("no pat_adjustments for node");
            }

            Some(adjustment) => {
                let resolved_adjustment = self.resolve(adjustment, &span);
                debug!(?resolved_adjustment);
                self.typeck_results.pat_adjustments_mut().insert(hir_id, resolved_adjustment);
            }
        }
    }

    /// Resolves and copies over the liberated fn signatures.
    fn visit_liberated_fn_sigs(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        for (&local_id, &fn_sig) in fcx_typeck_results.liberated_fn_sigs().iter() {
            let hir_id = hir::HirId { owner: common_hir_owner, local_id };
            let fn_sig = self.resolve(fn_sig, &hir_id);
            self.typeck_results.liberated_fn_sigs_mut().insert(hir_id, fn_sig);
        }
    }

    /// Resolves and copies over functional-record-update field types.
    fn visit_fru_field_types(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        for (&local_id, ftys) in fcx_typeck_results.fru_field_types().iter() {
            let hir_id = hir::HirId { owner: common_hir_owner, local_id };
            let ftys = self.resolve(ftys.clone(), &hir_id);
            self.typeck_results.fru_field_types_mut().insert(hir_id, ftys);
        }
    }

    /// Fully resolves `x`, ICE-ing in debug builds if inference variables
    /// remain, and tainting the results if resolution produced errors.
    fn resolve<T>(&mut self, x: T, span: &dyn Locatable) -> T
    where
        T: TypeFoldable<'tcx>,
    {
        let mut resolver = Resolver::new(self.fcx, span, self.body);
        let x = x.fold_with(&mut resolver);
        if cfg!(debug_assertions) && x.needs_infer() {
            span_bug!(span.to_span(self.fcx.tcx), "writeback: `{:?}` has inference variables", x);
        }

        // We may have introduced e.g. `ty::Error`, if inference failed, make sure
        // to mark the `TypeckResults` as tainted in that case, so that downstream
        // users of the typeck results don't produce extra errors, or worse, ICEs.
        if resolver.replaced_with_error {
            // FIXME(eddyb) keep track of `ErrorGuaranteed` from where the error was emitted.
            self.typeck_results.tainted_by_errors =
                Some(ErrorGuaranteed::unchecked_claim_error_was_emitted());
        }

        x
    }
}
660
/// Something that can produce a `Span` for diagnostics, given a `TyCtxt`.
/// Lets `resolve`/`Resolver` accept either a `Span` or a `HirId`.
pub(crate) trait Locatable {
    fn to_span(&self, tcx: TyCtxt<'_>) -> Span;
}
664
665 impl Locatable for Span {
666     fn to_span(&self, _: TyCtxt<'_>) -> Span {
667         *self
668     }
669 }
670
671 impl Locatable for hir::HirId {
672     fn to_span(&self, tcx: TyCtxt<'_>) -> Span {
673         tcx.hir().span(*self)
674     }
675 }
676
/// The Resolver. This is the type folding engine that detects
/// unresolved types and so forth.
struct Resolver<'cx, 'tcx> {
    tcx: TyCtxt<'tcx>,
    // Inference context used to fully resolve variables.
    infcx: &'cx InferCtxt<'cx, 'tcx>,
    // Location blamed in E0282 diagnostics when resolution fails.
    span: &'cx dyn Locatable,
    // Body being resolved; provides context for inference-failure errors.
    body: &'tcx hir::Body<'tcx>,

    /// Set to `true` if any `Ty` or `ty::Const` had to be replaced with an `Error`.
    replaced_with_error: bool,
}
688
689 impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
690     fn new(
691         fcx: &'cx FnCtxt<'cx, 'tcx>,
692         span: &'cx dyn Locatable,
693         body: &'tcx hir::Body<'tcx>,
694     ) -> Resolver<'cx, 'tcx> {
695         Resolver { tcx: fcx.tcx, infcx: fcx, span, body, replaced_with_error: false }
696     }
697
698     fn report_type_error(&self, t: Ty<'tcx>) {
699         if !self.tcx.sess.has_errors().is_some() {
700             self.infcx
701                 .emit_inference_failure_err(
702                     Some(self.body.id()),
703                     self.span.to_span(self.tcx),
704                     t.into(),
705                     E0282,
706                     false,
707                 )
708                 .emit();
709         }
710     }
711
712     fn report_const_error(&self, c: ty::Const<'tcx>) {
713         if self.tcx.sess.has_errors().is_none() {
714             self.infcx
715                 .emit_inference_failure_err(
716                     Some(self.body.id()),
717                     self.span.to_span(self.tcx),
718                     c.into(),
719                     E0282,
720                     false,
721                 )
722                 .emit();
723         }
724     }
725 }
726
/// Folder that erases all early-bound (and other non-late-bound) regions,
/// while leaving late-bound regions untouched.
struct EraseEarlyRegions<'tcx> {
    tcx: TyCtxt<'tcx>,
}
730
731 impl<'tcx> TypeFolder<'tcx> for EraseEarlyRegions<'tcx> {
732     fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
733         self.tcx
734     }
735     fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
736         if ty.has_type_flags(ty::TypeFlags::HAS_FREE_REGIONS) {
737             ty.super_fold_with(self)
738         } else {
739             ty
740         }
741     }
742     fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
743         if r.is_late_bound() { r } else { self.tcx.lifetimes.re_erased }
744     }
745 }
746
impl<'cx, 'tcx> TypeFolder<'tcx> for Resolver<'cx, 'tcx> {
    fn tcx<'a>(&'a self) -> TyCtxt<'tcx> {
        self.tcx
    }

    /// Fully resolves `t`; on failure reports E0282, marks the results as
    /// error-tainted, and substitutes `ty::Error`.
    fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
        match self.infcx.fully_resolve(t) {
            Ok(t) => {
                // Do not anonymize late-bound regions
                // (e.g. keep `for<'a>` named `for<'a>`).
                // This allows NLL to generate error messages that
                // refer to the higher-ranked lifetime names written by the user.
                EraseEarlyRegions { tcx: self.tcx }.fold_ty(t)
            }
            Err(_) => {
                debug!("Resolver::fold_ty: input type `{:?}` not fully resolvable", t);
                self.report_type_error(t);
                self.replaced_with_error = true;
                self.tcx().ty_error()
            }
        }
    }

    /// Regions reaching writeback are erased; late-bound regions are
    /// handled by `fold_ty` above and should never get here.
    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
        debug_assert!(!r.is_late_bound(), "Should not be resolving bound region.");
        self.tcx.lifetimes.re_erased
    }

    /// Fully resolves `ct`; on failure reports E0282, marks the results as
    /// error-tainted, and substitutes an error constant of the same type.
    fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> {
        match self.infcx.fully_resolve(ct) {
            Ok(ct) => self.tcx.erase_regions(ct),
            Err(_) => {
                debug!("Resolver::fold_const: input const `{:?}` not fully resolvable", ct);
                self.report_const_error(ct);
                self.replaced_with_error = true;
                self.tcx().const_error(ct.ty())
            }
        }
    }
}
787
788 ///////////////////////////////////////////////////////////////////////////
789 // During type check, we store promises with the result of trait
790 // lookup rather than the actual results (because the results are not
791 // necessarily available immediately). These routines unwind the
792 // promises. It is expected that we will have already reported any
793 // errors that may be encountered, so if the promises store an error,
794 // a dummy result is returned.