]> git.lizzy.rs Git - rust.git/blob - compiler/rustc_typeck/src/check/writeback.rs
fix most compiler/ doctests
[rust.git] / compiler / rustc_typeck / src / check / writeback.rs
1 // Type resolution: the phase that finds all the types in the AST with
2 // unresolved type variables and replaces "ty_var" types with their
3 // substitutions.
4
5 use crate::check::FnCtxt;
6
7 use rustc_data_structures::stable_map::FxHashMap;
8 use rustc_errors::ErrorGuaranteed;
9 use rustc_hir as hir;
10 use rustc_hir::def_id::DefId;
11 use rustc_hir::intravisit::{self, Visitor};
12 use rustc_infer::infer::error_reporting::TypeAnnotationNeeded::E0282;
13 use rustc_infer::infer::InferCtxt;
14 use rustc_middle::hir::place::Place as HirPlace;
15 use rustc_middle::mir::FakeReadCause;
16 use rustc_middle::ty::adjustment::{Adjust, Adjustment, PointerCast};
17 use rustc_middle::ty::fold::{TypeFoldable, TypeFolder};
18 use rustc_middle::ty::{self, ClosureSizeProfileData, Ty, TyCtxt};
19 use rustc_span::symbol::sym;
20 use rustc_span::Span;
21
22 use std::mem;
23 use std::ops::ControlFlow;
24
25 ///////////////////////////////////////////////////////////////////////////
26 // Entry point
27
28 // During type inference, partially inferred types are
29 // represented using Type variables (ty::Infer). These don't appear in
30 // the final TypeckResults since all of the types should have been
31 // inferred once typeck is done.
// When type inference is running, however, updating the typeck results
// every time a new type is inferred would be unreasonably slow, so instead
// all of the replacement happens at the end in resolve_type_vars_in_body,
// which creates a new TypeckResults that doesn't contain any inference types.
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
    /// Entry point for writeback: walks `body`, replaces every remaining
    /// inference variable with its resolved form, and returns a freshly
    /// arena-allocated `TypeckResults` that contains no inference types.
    ///
    /// The `visit_*` calls below each transfer one side table from the
    /// in-progress `self.typeck_results` into the final results.
    pub fn resolve_type_vars_in_body(
        &self,
        body: &'tcx hir::Body<'tcx>,
    ) -> &'tcx ty::TypeckResults<'tcx> {
        let item_id = self.tcx.hir().body_owner(body.id());
        let item_def_id = self.tcx.hir().local_def_id(item_id);

        // This attribute causes us to dump some writeback information
        // in the form of errors, which is used for unit tests.
        let rustc_dump_user_substs =
            self.tcx.has_attr(item_def_id.to_def_id(), sym::rustc_dump_user_substs);

        let mut wbcx = WritebackCx::new(self, body, rustc_dump_user_substs);
        // Parameters are not visited by `visit_body` pattern traversal alone,
        // so resolve each parameter's node explicitly first.
        for param in body.params {
            wbcx.visit_node_id(param.pat.span, param.hir_id);
        }
        // Type only exists for constants and statics, not functions.
        match self.tcx.hir().body_owner_kind(item_def_id) {
            hir::BodyOwnerKind::Const | hir::BodyOwnerKind::Static(_) => {
                wbcx.visit_node_id(body.value.span, item_id);
            }
            hir::BodyOwnerKind::Closure | hir::BodyOwnerKind::Fn => (),
        }
        wbcx.visit_body(body);
        wbcx.visit_min_capture_map();
        wbcx.eval_closure_size();
        wbcx.visit_fake_reads_map();
        wbcx.visit_closures();
        wbcx.visit_liberated_fn_sigs();
        wbcx.visit_fru_field_types();
        wbcx.visit_opaque_types();
        wbcx.visit_coercion_casts();
        wbcx.visit_user_provided_tys();
        wbcx.visit_user_provided_sigs();
        wbcx.visit_generator_interior_types();

        // These two tables are moved wholesale (via `mem::take`) rather than
        // resolved entry-by-entry; they contain no inference variables.
        let used_trait_imports =
            mem::take(&mut self.typeck_results.borrow_mut().used_trait_imports);
        debug!("used_trait_imports({:?}) = {:?}", item_def_id, used_trait_imports);
        wbcx.typeck_results.used_trait_imports = used_trait_imports;

        wbcx.typeck_results.treat_byte_string_as_slice =
            mem::take(&mut self.typeck_results.borrow_mut().treat_byte_string_as_slice);

        // Propagate error taint so downstream consumers can suppress
        // follow-on diagnostics.
        if self.is_tainted_by_errors() {
            // FIXME(eddyb) keep track of `ErrorGuaranteed` from where the error was emitted.
            wbcx.typeck_results.tainted_by_errors =
                Some(ErrorGuaranteed::unchecked_claim_error_was_emitted());
        }

        debug!("writeback: typeck results for {:?} are {:#?}", item_def_id, wbcx.typeck_results);

        self.tcx.arena.alloc(wbcx.typeck_results)
    }
}
93
94 ///////////////////////////////////////////////////////////////////////////
95 // The Writeback context. This visitor walks the HIR, checking the
96 // fn-specific typeck results to find references to types or regions. It
97 // resolves those regions to remove inference variables and writes the
98 // final result back into the master typeck results in the tcx. Here and
99 // there, it applies a few ad-hoc checks that were not convenient to
100 // do elsewhere.
101
struct WritebackCx<'cx, 'tcx> {
    /// The function context whose (inference-variable-laden) typeck results
    /// are being written back.
    fcx: &'cx FnCtxt<'cx, 'tcx>,

    /// The final, fully-resolved `TypeckResults` being built up.
    typeck_results: ty::TypeckResults<'tcx>,

    /// The body being written back; passed to `Resolver` so inference
    /// failures can be reported against it.
    body: &'tcx hir::Body<'tcx>,

    /// Whether `#[rustc_dump_user_substs]` is present on the body owner
    /// (a unit-testing aid; see `visit_user_provided_tys`).
    rustc_dump_user_substs: bool,
}
111
impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
    /// Creates a writeback context for `body` with an empty `TypeckResults`
    /// keyed to the body's HIR owner.
    fn new(
        fcx: &'cx FnCtxt<'cx, 'tcx>,
        body: &'tcx hir::Body<'tcx>,
        rustc_dump_user_substs: bool,
    ) -> WritebackCx<'cx, 'tcx> {
        let owner = body.id().hir_id.owner;

        WritebackCx {
            fcx,
            typeck_results: ty::TypeckResults::new(owner),
            body,
            rustc_dump_user_substs,
        }
    }

    fn tcx(&self) -> TyCtxt<'tcx> {
        self.fcx.tcx
    }

    /// Records the fully-resolved type of `hir_id` in the final results.
    /// By this point the type must contain no inference variables,
    /// placeholders, or free regions — asserted below.
    fn write_ty_to_typeck_results(&mut self, hir_id: hir::HirId, ty: Ty<'tcx>) {
        debug!("write_ty_to_typeck_results({:?}, {:?})", hir_id, ty);
        assert!(!ty.needs_infer() && !ty.has_placeholders() && !ty.has_free_regions());
        self.typeck_results.node_types_mut().insert(hir_id, ty);
    }

    // Hacky hack: During type-checking, we treat *all* operators
    // as potentially overloaded. But then, during writeback, if
    // we observe that something like `a+b` is (known to be)
    // operating on scalars, we clear the overload.
    fn fix_scalar_builtin_expr(&mut self, e: &hir::Expr<'_>) {
        match e.kind {
            // Unary `-`/`!` on a scalar: drop the method-call record so the
            // operation becomes a builtin one.
            hir::ExprKind::Unary(hir::UnOp::Neg | hir::UnOp::Not, inner) => {
                let inner_ty = self.fcx.node_ty(inner.hir_id);
                let inner_ty = self.fcx.resolve_vars_if_possible(inner_ty);

                if inner_ty.is_scalar() {
                    let mut typeck_results = self.fcx.typeck_results.borrow_mut();
                    typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    typeck_results.node_substs_mut().remove(e.hir_id);
                }
            }
            // Binary ops and compound assignment: builtin only when *both*
            // operands are scalar.
            hir::ExprKind::Binary(ref op, lhs, rhs) | hir::ExprKind::AssignOp(ref op, lhs, rhs) => {
                let lhs_ty = self.fcx.node_ty(lhs.hir_id);
                let lhs_ty = self.fcx.resolve_vars_if_possible(lhs_ty);

                let rhs_ty = self.fcx.node_ty(rhs.hir_id);
                let rhs_ty = self.fcx.resolve_vars_if_possible(rhs_ty);

                if lhs_ty.is_scalar() && rhs_ty.is_scalar() {
                    let mut typeck_results = self.fcx.typeck_results.borrow_mut();
                    typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    typeck_results.node_substs_mut().remove(e.hir_id);

                    match e.kind {
                        hir::ExprKind::Binary(..) => {
                            // Overloaded by-ref operators took their operands
                            // by reference; pop the autoref adjustment that is
                            // no longer needed for the builtin form.
                            if !op.node.is_by_value() {
                                let mut adjustments = typeck_results.adjustments_mut();
                                if let Some(a) = adjustments.get_mut(lhs.hir_id) {
                                    a.pop();
                                }
                                if let Some(a) = adjustments.get_mut(rhs.hir_id) {
                                    a.pop();
                                }
                            }
                        }
                        // `a += b` and friends always autoref'd the LHS; pop it.
                        hir::ExprKind::AssignOp(..)
                            if let Some(a) = typeck_results.adjustments_mut().get_mut(lhs.hir_id) =>
                        {
                            a.pop();
                        }
                        _ => {}
                    }
                }
            }
            _ => {}
        }
    }

    // Similar to operators, indexing is always assumed to be overloaded
    // Here, correct cases where an indexing expression can be simplified
    // to use builtin indexing because the index type is known to be
    // usize-ish
    fn fix_index_builtin_expr(&mut self, e: &hir::Expr<'_>) {
        if let hir::ExprKind::Index(ref base, ref index) = e.kind {
            let mut typeck_results = self.fcx.typeck_results.borrow_mut();

            // All valid indexing looks like this; might encounter non-valid indexes at this point.
            let base_ty = typeck_results
                .expr_ty_adjusted_opt(base)
                .map(|t| self.fcx.resolve_vars_if_possible(t).kind());
            if base_ty.is_none() {
                // When encountering `return [0][0]` outside of a `fn` body we can encounter a base
                // that isn't in the type table. We assume more relevant errors have already been
                // emitted, so we delay an ICE if none have. (#64638)
                self.tcx().sess.delay_span_bug(e.span, &format!("bad base: `{:?}`", base));
            }
            if let Some(ty::Ref(_, base_ty, _)) = base_ty {
                let index_ty = typeck_results.expr_ty_adjusted_opt(index).unwrap_or_else(|| {
                    // When encountering `return [0][0]` outside of a `fn` body we would attempt
                    // to access an nonexistent index. We assume that more relevant errors will
                    // already have been emitted, so we only gate on this with an ICE if no
                    // error has been emitted. (#64638)
                    self.fcx.tcx.ty_error_with_message(
                        e.span,
                        &format!("bad index {:?} for base: `{:?}`", index, base),
                    )
                });
                let index_ty = self.fcx.resolve_vars_if_possible(index_ty);

                if base_ty.builtin_index().is_some() && index_ty == self.fcx.tcx.types.usize {
                    // Remove the method call record
                    typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    typeck_results.node_substs_mut().remove(e.hir_id);

                    if let Some(a) = typeck_results.adjustments_mut().get_mut(base.hir_id) {
                        // Discard the need for a mutable borrow

                        // Extra adjustment made when indexing causes a drop
                        // of size information - we need to get rid of it
                        // Since this is "after" the other adjustment to be
                        // discarded, we do an extra `pop()`
                        if let Some(Adjustment {
                            kind: Adjust::Pointer(PointerCast::Unsize), ..
                        }) = a.pop()
                        {
                            // So the borrow discard actually happens here
                            a.pop();
                        }
                    }
                }
            }
        }
    }
}
247
248 ///////////////////////////////////////////////////////////////////////////
249 // Impl of Visitor for Resolver
250 //
251 // This is the master code which walks the AST. It delegates most of
252 // the heavy lifting to the generic visit and resolve functions
253 // below. In general, a function is made into a `visitor` if it must
254 // traffic in node-ids or update typeck results in the type context etc.
255
impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> {
    fn visit_expr(&mut self, e: &'tcx hir::Expr<'tcx>) {
        // Undo spurious operator/index overload records *before* resolving
        // this node, so the fixed-up tables are what get written back.
        self.fix_scalar_builtin_expr(e);
        self.fix_index_builtin_expr(e);

        self.visit_node_id(e.span, e.hir_id);

        match e.kind {
            hir::ExprKind::Closure(_, _, body, _, _) => {
                // Closures have nested bodies that are not reached by
                // `walk_expr`; resolve their params and recurse explicitly.
                let body = self.fcx.tcx.hir().body(body);
                for param in body.params {
                    self.visit_node_id(e.span, param.hir_id);
                }

                self.visit_body(body);
            }
            hir::ExprKind::Struct(_, fields, _) => {
                for field in fields {
                    self.visit_field_id(field.hir_id);
                }
            }
            hir::ExprKind::Field(..) => {
                self.visit_field_id(e.hir_id);
            }
            hir::ExprKind::ConstBlock(anon_const) => {
                // Inline const blocks also carry a nested body.
                self.visit_node_id(e.span, anon_const.hir_id);

                let body = self.tcx().hir().body(anon_const.body);
                self.visit_body(body);
            }
            _ => {}
        }

        intravisit::walk_expr(self, e);
    }

    fn visit_block(&mut self, b: &'tcx hir::Block<'tcx>) {
        self.visit_node_id(b.span, b.hir_id);
        intravisit::walk_block(self, b);
    }

    fn visit_pat(&mut self, p: &'tcx hir::Pat<'tcx>) {
        match p.kind {
            // Copy over the (already inference-free) binding mode, emitting
            // diagnostics via `extract_binding_mode` if it is missing.
            hir::PatKind::Binding(..) => {
                let typeck_results = self.fcx.typeck_results.borrow();
                if let Some(bm) =
                    typeck_results.extract_binding_mode(self.tcx().sess, p.hir_id, p.span)
                {
                    self.typeck_results.pat_binding_modes_mut().insert(p.hir_id, bm);
                }
            }
            hir::PatKind::Struct(_, fields, _) => {
                for field in fields {
                    self.visit_field_id(field.hir_id);
                }
            }
            _ => {}
        };

        self.visit_pat_adjustments(p.span, p.hir_id);

        self.visit_node_id(p.span, p.hir_id);
        intravisit::walk_pat(self, p);
    }

    fn visit_local(&mut self, l: &'tcx hir::Local<'tcx>) {
        // Walk children first so the declared type below is as resolved
        // as possible.
        intravisit::walk_local(self, l);
        let var_ty = self.fcx.local_ty(l.span, l.hir_id).decl_ty;
        let var_ty = self.resolve(var_ty, &l.span);
        self.write_ty_to_typeck_results(l.hir_id, var_ty);
    }

    fn visit_ty(&mut self, hir_ty: &'tcx hir::Ty<'tcx>) {
        intravisit::walk_ty(self, hir_ty);
        let ty = self.fcx.node_ty(hir_ty.hir_id);
        let ty = self.resolve(ty, &hir_ty.span);
        self.write_ty_to_typeck_results(hir_ty.hir_id, ty);
    }

    fn visit_infer(&mut self, inf: &'tcx hir::InferArg) {
        intravisit::walk_inf(self, inf);
        // Ignore cases where the inference is a const.
        if let Some(ty) = self.fcx.node_ty_opt(inf.hir_id) {
            let ty = self.resolve(ty, &inf.span);
            self.write_ty_to_typeck_results(inf.hir_id, ty);
        }
    }
}
344
impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
    /// Resolves and copies over closure size profiling data.
    fn eval_closure_size(&mut self) {
        let mut res: FxHashMap<DefId, ClosureSizeProfileData<'tcx>> = Default::default();
        for (closure_def_id, data) in self.fcx.typeck_results.borrow().closure_size_eval.iter() {
            // The closure's own HIR id serves as the diagnostic location.
            let closure_hir_id =
                self.tcx().hir().local_def_id_to_hir_id(closure_def_id.expect_local());

            let data = self.resolve(*data, &closure_hir_id);

            res.insert(*closure_def_id, data);
        }

        self.typeck_results.closure_size_eval = res;
    }

    /// Resolves every captured place in the closure min-capture map and
    /// rebuilds it in the final results.
    fn visit_min_capture_map(&mut self) {
        let mut min_captures_wb = ty::MinCaptureInformationMap::with_capacity_and_hasher(
            self.fcx.typeck_results.borrow().closure_min_captures.len(),
            Default::default(),
        );
        for (closure_def_id, root_min_captures) in
            self.fcx.typeck_results.borrow().closure_min_captures.iter()
        {
            let mut root_var_map_wb = ty::RootVariableMinCaptureList::with_capacity_and_hasher(
                root_min_captures.len(),
                Default::default(),
            );
            for (var_hir_id, min_list) in root_min_captures.iter() {
                let min_list_wb = min_list
                    .iter()
                    .map(|captured_place| {
                        // Prefer the capture's own path expression as the
                        // error location; fall back to the closure itself.
                        let locatable = captured_place.info.path_expr_id.unwrap_or_else(|| {
                            self.tcx().hir().local_def_id_to_hir_id(closure_def_id.expect_local())
                        });

                        self.resolve(captured_place.clone(), &locatable)
                    })
                    .collect();
                root_var_map_wb.insert(*var_hir_id, min_list_wb);
            }
            min_captures_wb.insert(*closure_def_id, root_var_map_wb);
        }

        self.typeck_results.closure_min_captures = min_captures_wb;
    }

    /// Resolves the fake reads recorded for closures (used by borrowck).
    fn visit_fake_reads_map(&mut self) {
        let mut resolved_closure_fake_reads: FxHashMap<
            DefId,
            Vec<(HirPlace<'tcx>, FakeReadCause, hir::HirId)>,
        > = Default::default();
        for (closure_def_id, fake_reads) in
            self.fcx.typeck_results.borrow().closure_fake_reads.iter()
        {
            let mut resolved_fake_reads = Vec::<(HirPlace<'tcx>, FakeReadCause, hir::HirId)>::new();
            for (place, cause, hir_id) in fake_reads.iter() {
                let locatable =
                    self.tcx().hir().local_def_id_to_hir_id(closure_def_id.expect_local());

                let resolved_fake_read = self.resolve(place.clone(), &locatable);
                resolved_fake_reads.push((resolved_fake_read, *cause, *hir_id));
            }
            resolved_closure_fake_reads.insert(*closure_def_id, resolved_fake_reads);
        }
        self.typeck_results.closure_fake_reads = resolved_closure_fake_reads;
    }

    /// Resolves and copies over closure-kind origin info (which use of a
    /// captured variable forced the closure's kind).
    fn visit_closures(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        for (id, origin) in fcx_typeck_results.closure_kind_origins().iter() {
            // The table is keyed by local ids; reattach the shared owner.
            let hir_id = hir::HirId { owner: common_hir_owner, local_id: *id };
            let place_span = origin.0;
            let place = self.resolve(origin.1.clone(), &place_span);
            self.typeck_results.closure_kind_origins_mut().insert(hir_id, (place_span, place));
        }
    }

    /// Copies over the set of coercion casts (no resolution needed — these
    /// are just local ids).
    fn visit_coercion_casts(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        let fcx_coercion_casts = fcx_typeck_results.coercion_casts();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);

        for local_id in fcx_coercion_casts {
            self.typeck_results.set_coercion_cast(*local_id);
        }
    }

    /// Copies over user-provided type annotations, and (under
    /// `#[rustc_dump_user_substs]`) emits them as errors for unit tests.
    fn visit_user_provided_tys(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        let mut errors_buffer = Vec::new();
        for (&local_id, c_ty) in fcx_typeck_results.user_provided_types().iter() {
            let hir_id = hir::HirId { owner: common_hir_owner, local_id };

            // User-provided types are canonicalized and should never carry
            // inference variables at this point.
            if cfg!(debug_assertions) && c_ty.needs_infer() {
                span_bug!(
                    hir_id.to_span(self.fcx.tcx),
                    "writeback: `{:?}` has inference variables",
                    c_ty
                );
            };

            self.typeck_results.user_provided_types_mut().insert(hir_id, *c_ty);

            if let ty::UserType::TypeOf(_, user_substs) = c_ty.value {
                if self.rustc_dump_user_substs {
                    // This is a unit-testing mechanism.
                    let span = self.tcx().hir().span(hir_id);
                    // We need to buffer the errors in order to guarantee a consistent
                    // order when emitting them.
                    let err = self
                        .tcx()
                        .sess
                        .struct_span_err(span, &format!("user substs: {:?}", user_substs));
                    err.buffer(&mut errors_buffer);
                }
            }
        }

        if !errors_buffer.is_empty() {
            errors_buffer.sort_by_key(|diag| diag.span.primary_span());
            for mut diag in errors_buffer.drain(..) {
                self.tcx().sess.diagnostic().emit_diagnostic(&mut diag);
            }
        }
    }

    /// Copies over user-provided fn signatures (keyed by `DefId`).
    fn visit_user_provided_sigs(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);

        for (&def_id, c_sig) in fcx_typeck_results.user_provided_sigs.iter() {
            if cfg!(debug_assertions) && c_sig.needs_infer() {
                span_bug!(
                    self.fcx.tcx.hir().span_if_local(def_id).unwrap(),
                    "writeback: `{:?}` has inference variables",
                    c_sig
                );
            };

            self.typeck_results.user_provided_sigs.insert(def_id, *c_sig);
        }
    }

    /// Clones the generator interior type list into the final results.
    fn visit_generator_interior_types(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        self.typeck_results.generator_interior_types =
            fcx_typeck_results.generator_interior_types.clone();
    }

    /// Resolves the hidden types of opaque types (`impl Trait`) recorded
    /// during inference and stores them in `concrete_opaque_types`.
    #[instrument(skip(self), level = "debug")]
    fn visit_opaque_types(&mut self) {
        let opaque_types =
            self.fcx.infcx.inner.borrow_mut().opaque_type_storage.take_opaque_types();
        for (opaque_type_key, decl) in opaque_types {
            let hidden_type = match decl.origin {
                hir::OpaqueTyOrigin::FnReturn(_) | hir::OpaqueTyOrigin::AsyncFn(_) => {
                    let ty = self.resolve(decl.hidden_type.ty, &decl.hidden_type.span);
                    // Detects a hidden type that (directly or indirectly)
                    // mentions the opaque type it is defining.
                    struct RecursionChecker {
                        def_id: DefId,
                    }
                    impl<'tcx> ty::TypeVisitor<'tcx> for RecursionChecker {
                        type BreakTy = ();
                        fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
                            if let ty::Opaque(def_id, _) = *t.kind() {
                                if def_id == self.def_id {
                                    return ControlFlow::Break(());
                                }
                            }
                            t.super_visit_with(self)
                        }
                    }
                    if ty
                        .visit_with(&mut RecursionChecker { def_id: opaque_type_key.def_id })
                        .is_break()
                    {
                        // NOTE(review): this `return` exits the whole function,
                        // skipping any opaque types not yet processed — not just
                        // this entry. Presumably intentional to bail on
                        // recursive hidden types; confirm before changing.
                        return;
                    }
                    Some(ty)
                }
                // Type-alias impl trait hidden types are computed elsewhere.
                hir::OpaqueTyOrigin::TyAlias => None,
            };
            self.typeck_results.concrete_opaque_types.insert(opaque_type_key.def_id, hidden_type);
        }
    }

    /// Moves the resolved field index for `hir_id` (if any) into the final
    /// results. Field indices carry no inference data, so no resolution.
    fn visit_field_id(&mut self, hir_id: hir::HirId) {
        if let Some(index) = self.fcx.typeck_results.borrow_mut().field_indices_mut().remove(hir_id)
        {
            self.typeck_results.field_indices_mut().insert(hir_id, index);
        }
    }

    /// Resolves everything recorded for a single HIR node: its
    /// type-dependent def, adjustments, node type, and substitutions.
    #[instrument(skip(self, span), level = "debug")]
    fn visit_node_id(&mut self, span: Span, hir_id: hir::HirId) {
        // Export associated path extensions and method resolutions.
        if let Some(def) =
            self.fcx.typeck_results.borrow_mut().type_dependent_defs_mut().remove(hir_id)
        {
            self.typeck_results.type_dependent_defs_mut().insert(hir_id, def);
        }

        // Resolve any borrowings for the node with id `node_id`
        self.visit_adjustments(span, hir_id);

        // Resolve the type of the node with id `node_id`
        let n_ty = self.fcx.node_ty(hir_id);
        let n_ty = self.resolve(n_ty, &span);
        self.write_ty_to_typeck_results(hir_id, n_ty);
        debug!(?n_ty);

        // Resolve any substitutions
        if let Some(substs) = self.fcx.typeck_results.borrow().node_substs_opt(hir_id) {
            let substs = self.resolve(substs, &span);
            debug!("write_substs_to_tcx({:?}, {:?})", hir_id, substs);
            assert!(!substs.needs_infer() && !substs.has_placeholders());
            self.typeck_results.node_substs_mut().insert(hir_id, substs);
        }
    }

    /// Resolves and transfers the adjustment list (autoderef/autoref/
    /// coercions) recorded for `hir_id`, if any.
    #[instrument(skip(self, span), level = "debug")]
    fn visit_adjustments(&mut self, span: Span, hir_id: hir::HirId) {
        // `remove` (not `get`): entries are moved out of the in-progress
        // table so they are written back exactly once.
        let adjustment = self.fcx.typeck_results.borrow_mut().adjustments_mut().remove(hir_id);
        match adjustment {
            None => {
                debug!("no adjustments for node");
            }

            Some(adjustment) => {
                let resolved_adjustment = self.resolve(adjustment, &span);
                debug!(?resolved_adjustment);
                self.typeck_results.adjustments_mut().insert(hir_id, resolved_adjustment);
            }
        }
    }

    /// Like `visit_adjustments`, but for pattern adjustments
    /// (implicit derefs applied in match ergonomics).
    #[instrument(skip(self, span), level = "debug")]
    fn visit_pat_adjustments(&mut self, span: Span, hir_id: hir::HirId) {
        let adjustment = self.fcx.typeck_results.borrow_mut().pat_adjustments_mut().remove(hir_id);
        match adjustment {
            None => {
                debug!("no pat_adjustments for node");
            }

            Some(adjustment) => {
                let resolved_adjustment = self.resolve(adjustment, &span);
                debug!(?resolved_adjustment);
                self.typeck_results.pat_adjustments_mut().insert(hir_id, resolved_adjustment);
            }
        }
    }

    /// Resolves and copies over the liberated fn signatures.
    fn visit_liberated_fn_sigs(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        for (&local_id, &fn_sig) in fcx_typeck_results.liberated_fn_sigs().iter() {
            let hir_id = hir::HirId { owner: common_hir_owner, local_id };
            let fn_sig = self.resolve(fn_sig, &hir_id);
            self.typeck_results.liberated_fn_sigs_mut().insert(hir_id, fn_sig);
        }
    }

    /// Resolves and copies over functional-record-update field types
    /// (the `..base` fields of struct expressions).
    fn visit_fru_field_types(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        for (&local_id, ftys) in fcx_typeck_results.fru_field_types().iter() {
            let hir_id = hir::HirId { owner: common_hir_owner, local_id };
            let ftys = self.resolve(ftys.clone(), &hir_id);
            self.typeck_results.fru_field_types_mut().insert(hir_id, ftys);
        }
    }

    /// Fully resolves `x`, ICEing (in debug builds) if inference variables
    /// survive, and tainting the results if errors were substituted in.
    fn resolve<T>(&mut self, x: T, span: &dyn Locatable) -> T
    where
        T: TypeFoldable<'tcx>,
    {
        let mut resolver = Resolver::new(self.fcx, span, self.body);
        let x = x.fold_with(&mut resolver);
        if cfg!(debug_assertions) && x.needs_infer() {
            span_bug!(span.to_span(self.fcx.tcx), "writeback: `{:?}` has inference variables", x);
        }

        // We may have introduced e.g. `ty::Error`, if inference failed, make sure
        // to mark the `TypeckResults` as tainted in that case, so that downstream
        // users of the typeck results don't produce extra errors, or worse, ICEs.
        if resolver.replaced_with_error {
            // FIXME(eddyb) keep track of `ErrorGuaranteed` from where the error was emitted.
            self.typeck_results.tainted_by_errors =
                Some(ErrorGuaranteed::unchecked_claim_error_was_emitted());
        }

        x
    }
}
648
/// Anything that can produce a `Span` for diagnostics, letting callers of
/// `WritebackCx::resolve` pass either a `Span` or a `hir::HirId`.
crate trait Locatable {
    fn to_span(&self, tcx: TyCtxt<'_>) -> Span;
}
652
impl Locatable for Span {
    // A span locates itself; the `tcx` is unused.
    fn to_span(&self, _: TyCtxt<'_>) -> Span {
        *self
    }
}
658
impl Locatable for hir::HirId {
    // Look up the node's span in the HIR map.
    fn to_span(&self, tcx: TyCtxt<'_>) -> Span {
        tcx.hir().span(*self)
    }
}
664
/// The Resolver. This is the type folding engine that detects
/// unresolved types and so forth.
struct Resolver<'cx, 'tcx> {
    tcx: TyCtxt<'tcx>,
    infcx: &'cx InferCtxt<'cx, 'tcx>,
    /// Where to point diagnostics if a value fails to resolve.
    span: &'cx dyn Locatable,
    /// The body being checked, passed along for inference-failure reporting.
    body: &'tcx hir::Body<'tcx>,

    /// Set to `true` if any `Ty` or `ty::Const` had to be replaced with an `Error`.
    replaced_with_error: bool,
}
676
677 impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
678     fn new(
679         fcx: &'cx FnCtxt<'cx, 'tcx>,
680         span: &'cx dyn Locatable,
681         body: &'tcx hir::Body<'tcx>,
682     ) -> Resolver<'cx, 'tcx> {
683         Resolver { tcx: fcx.tcx, infcx: fcx, span, body, replaced_with_error: false }
684     }
685
686     fn report_type_error(&self, t: Ty<'tcx>) {
687         if !self.tcx.sess.has_errors().is_some() {
688             self.infcx
689                 .emit_inference_failure_err(
690                     Some(self.body.id()),
691                     self.span.to_span(self.tcx),
692                     t.into(),
693                     vec![],
694                     E0282,
695                 )
696                 .emit();
697         }
698     }
699
700     fn report_const_error(&self, c: ty::Const<'tcx>) {
701         if self.tcx.sess.has_errors().is_none() {
702             self.infcx
703                 .emit_inference_failure_err(
704                     Some(self.body.id()),
705                     self.span.to_span(self.tcx),
706                     c.into(),
707                     vec![],
708                     E0282,
709                 )
710                 .emit();
711         }
712     }
713 }
714
/// Folder that erases all regions except late-bound ones
/// (see its `TypeFolder` impl below).
struct EraseEarlyRegions<'tcx> {
    tcx: TyCtxt<'tcx>,
}
718
719 impl<'tcx> TypeFolder<'tcx> for EraseEarlyRegions<'tcx> {
720     fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
721         self.tcx
722     }
723     fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
724         if ty.has_type_flags(ty::TypeFlags::HAS_FREE_REGIONS) {
725             ty.super_fold_with(self)
726         } else {
727             ty
728         }
729     }
730     fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
731         if r.is_late_bound() { r } else { self.tcx.lifetimes.re_erased }
732     }
733 }
734
impl<'cx, 'tcx> TypeFolder<'tcx> for Resolver<'cx, 'tcx> {
    fn tcx<'a>(&'a self) -> TyCtxt<'tcx> {
        self.tcx
    }

    /// Fully resolves `t`; on failure, reports E0282, marks the results as
    /// tainted, and substitutes `ty::Error`.
    fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
        match self.infcx.fully_resolve(t) {
            Ok(t) => {
                // Do not anonymize late-bound regions
                // (e.g. keep `for<'a>` named `for<'a>`).
                // This allows NLL to generate error messages that
                // refer to the higher-ranked lifetime names written by the user.
                EraseEarlyRegions { tcx: self.infcx.tcx }.fold_ty(t)
            }
            Err(_) => {
                debug!("Resolver::fold_ty: input type `{:?}` not fully resolvable", t);
                self.report_type_error(t);
                self.replaced_with_error = true;
                self.tcx().ty_error()
            }
        }
    }

    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
        // Late-bound regions are handled in `fold_ty` (kept as written);
        // anything reaching here should be erasable.
        debug_assert!(!r.is_late_bound(), "Should not be resolving bound region.");
        self.tcx.lifetimes.re_erased
    }

    /// Fully resolves `ct`; on failure, reports E0282, marks the results as
    /// tainted, and substitutes an error const.
    fn fold_const(&mut self, ct: ty::Const<'tcx>) -> ty::Const<'tcx> {
        match self.infcx.fully_resolve(ct) {
            Ok(ct) => self.infcx.tcx.erase_regions(ct),
            Err(_) => {
                debug!("Resolver::fold_const: input const `{:?}` not fully resolvable", ct);
                self.report_const_error(ct);
                self.replaced_with_error = true;
                self.tcx().const_error(ct.ty())
            }
        }
    }
}
775
776 ///////////////////////////////////////////////////////////////////////////
777 // During type check, we store promises with the result of trait
778 // lookup rather than the actual results (because the results are not
779 // necessarily available immediately). These routines unwind the
780 // promises. It is expected that we will have already reported any
781 // errors that may be encountered, so if the promises store an error,
782 // a dummy result is returned.