// Type resolution: the phase that finds all the types in the AST with
// unresolved type variables and replaces "ty_var" types with their
// substitutions.

use crate::check::FnCtxt;

use rustc_data_structures::stable_map::FxHashMap;
use rustc_errors::ErrorReported;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::{self, Visitor};
use rustc_infer::infer::error_reporting::TypeAnnotationNeeded::E0282;
use rustc_infer::infer::InferCtxt;
use rustc_middle::hir::place::Place as HirPlace;
use rustc_middle::mir::FakeReadCause;
use rustc_middle::ty::adjustment::{Adjust, Adjustment, PointerCast};
use rustc_middle::ty::fold::{TypeFoldable, TypeFolder};
use rustc_middle::ty::{self, ClosureSizeProfileData, Ty, TyCtxt};
use rustc_span::symbol::sym;
use rustc_span::Span;

use std::mem;

///////////////////////////////////////////////////////////////////////////
// Entry point

// During type inference, partially inferred types are
// represented using inference variables (`ty::Infer`). These don't appear in
// the final `TypeckResults` since all of the types should have been
// inferred once typeck is done.
// While type inference is running, however, updating the typeck results
// every time a new type is inferred would be unreasonably slow, so instead
// all of the replacement happens at the end in `resolve_type_vars_in_body`,
// which creates a new `TypeckResults` that doesn't contain any inference
// variables.
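//
// A rough illustration (not actual compiler code): for a body such as
// `let v = Vec::new(); v.push(1u8);`, the in-progress typeck results record
// the type of `v` as something like `Vec<?0t>` (an unresolved inference
// variable). Writeback resolves that to `Vec<u8>` and stores only the fully
// resolved type in the final `TypeckResults`.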
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
    pub fn resolve_type_vars_in_body(
        &self,
        body: &'tcx hir::Body<'tcx>,
    ) -> &'tcx ty::TypeckResults<'tcx> {
        let item_id = self.tcx.hir().body_owner(body.id());
        let item_def_id = self.tcx.hir().local_def_id(item_id);

        // This attribute causes us to dump some writeback information
        // in the form of errors, which is used for unit tests.
        let rustc_dump_user_substs =
            self.tcx.has_attr(item_def_id.to_def_id(), sym::rustc_dump_user_substs);

        let mut wbcx = WritebackCx::new(self, body, rustc_dump_user_substs);
        for param in body.params {
            wbcx.visit_node_id(param.pat.span, param.hir_id);
        }
        // Type only exists for constants and statics, not functions.
        match self.tcx.hir().body_owner_kind(item_id) {
            hir::BodyOwnerKind::Const | hir::BodyOwnerKind::Static(_) => {
                wbcx.visit_node_id(body.value.span, item_id);
            }
            hir::BodyOwnerKind::Closure | hir::BodyOwnerKind::Fn => (),
        }
        wbcx.visit_body(body);
        wbcx.visit_min_capture_map();
        wbcx.eval_closure_size();
        wbcx.visit_fake_reads_map();
        wbcx.visit_closures();
        wbcx.visit_liberated_fn_sigs();
        wbcx.visit_fru_field_types();
        wbcx.visit_opaque_types();
        wbcx.visit_coercion_casts();
        wbcx.visit_user_provided_tys();
        wbcx.visit_user_provided_sigs();
        wbcx.visit_generator_interior_types();

        let used_trait_imports =
            mem::take(&mut self.typeck_results.borrow_mut().used_trait_imports);
        debug!("used_trait_imports({:?}) = {:?}", item_def_id, used_trait_imports);
        wbcx.typeck_results.used_trait_imports = used_trait_imports;

        wbcx.typeck_results.treat_byte_string_as_slice =
            mem::take(&mut self.typeck_results.borrow_mut().treat_byte_string_as_slice);

        if self.is_tainted_by_errors() {
            // FIXME(eddyb) keep track of `ErrorReported` from where the error was emitted.
            wbcx.typeck_results.tainted_by_errors = Some(ErrorReported);
        }

        debug!("writeback: typeck results for {:?} are {:#?}", item_def_id, wbcx.typeck_results);

        self.tcx.arena.alloc(wbcx.typeck_results)
    }
}

///////////////////////////////////////////////////////////////////////////
// The Writeback context. This visitor walks the HIR, checking the
// fn-specific typeck results to find references to types or regions. It
// resolves those types and regions to remove inference variables and writes
// the final result back into the master typeck results in the tcx. Here and
// there, it applies a few ad-hoc checks that were not convenient to
// do elsewhere.

struct WritebackCx<'cx, 'tcx> {
    fcx: &'cx FnCtxt<'cx, 'tcx>,

    typeck_results: ty::TypeckResults<'tcx>,

    body: &'tcx hir::Body<'tcx>,

    rustc_dump_user_substs: bool,
}

impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
    fn new(
        fcx: &'cx FnCtxt<'cx, 'tcx>,
        body: &'tcx hir::Body<'tcx>,
        rustc_dump_user_substs: bool,
    ) -> WritebackCx<'cx, 'tcx> {
        let owner = body.id().hir_id.owner;

        WritebackCx {
            fcx,
            typeck_results: ty::TypeckResults::new(owner),
            body,
            rustc_dump_user_substs,
        }
    }

    fn tcx(&self) -> TyCtxt<'tcx> {
        self.fcx.tcx
    }

    fn write_ty_to_typeck_results(&mut self, hir_id: hir::HirId, ty: Ty<'tcx>) {
        debug!("write_ty_to_typeck_results({:?}, {:?})", hir_id, ty);
        assert!(!ty.needs_infer() && !ty.has_placeholders() && !ty.has_free_regions());
        self.typeck_results.node_types_mut().insert(hir_id, ty);
    }

    // Hacky hack: During type-checking, we treat *all* operators
    // as potentially overloaded. But then, during writeback, if
    // we observe that something like `a+b` is (known to be)
    // operating on scalars, we clear the overload.
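    //
    // For example (illustrative only): in `fn f(a: u8, b: u8) -> u8 { a + b }`,
    // type checking initially records `a + b` as a call to `Add::add`; once both
    // operand types are known to be scalars, the method resolution and its
    // substs are removed below so that MIR building uses the builtin addition.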
    fn fix_scalar_builtin_expr(&mut self, e: &hir::Expr<'_>) {
        match e.kind {
            hir::ExprKind::Unary(hir::UnOp::Neg | hir::UnOp::Not, inner) => {
                let inner_ty = self.fcx.node_ty(inner.hir_id);
                let inner_ty = self.fcx.resolve_vars_if_possible(inner_ty);

                if inner_ty.is_scalar() {
                    let mut typeck_results = self.fcx.typeck_results.borrow_mut();
                    typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    typeck_results.node_substs_mut().remove(e.hir_id);
                }
            }
            hir::ExprKind::Binary(ref op, lhs, rhs) | hir::ExprKind::AssignOp(ref op, lhs, rhs) => {
                let lhs_ty = self.fcx.node_ty(lhs.hir_id);
                let lhs_ty = self.fcx.resolve_vars_if_possible(lhs_ty);

                let rhs_ty = self.fcx.node_ty(rhs.hir_id);
                let rhs_ty = self.fcx.resolve_vars_if_possible(rhs_ty);

                if lhs_ty.is_scalar() && rhs_ty.is_scalar() {
                    let mut typeck_results = self.fcx.typeck_results.borrow_mut();
                    typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    typeck_results.node_substs_mut().remove(e.hir_id);

                    match e.kind {
                        hir::ExprKind::Binary(..) => {
                            if !op.node.is_by_value() {
                                let mut adjustments = typeck_results.adjustments_mut();
                                if let Some(a) = adjustments.get_mut(lhs.hir_id) {
                                    a.pop();
                                }
                                if let Some(a) = adjustments.get_mut(rhs.hir_id) {
                                    a.pop();
                                }
                            }
                        }
                        hir::ExprKind::AssignOp(..)
                            if let Some(a) = typeck_results.adjustments_mut().get_mut(lhs.hir_id) =>
                        {
                            a.pop();
                        }
                        _ => {}
                    }
                }
            }
            _ => {}
        }
    }

    // Similar to operators, indexing is always assumed to be overloaded.
    // Here, we correct cases where an indexing expression can be simplified
    // to use builtin indexing because the index type is known to be
    // usize-ish.
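    //
    // Roughly (illustrative only): for `let a = [1, 2, 3]; a[0]`, the base is
    // adjusted by autoref + unsize to `&[i32]` and the index is a plain `usize`,
    // so the recorded `Index::index` resolution, its substs, and those extra
    // adjustments are dropped below, leaving builtin indexing.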
    fn fix_index_builtin_expr(&mut self, e: &hir::Expr<'_>) {
        if let hir::ExprKind::Index(ref base, ref index) = e.kind {
            let mut typeck_results = self.fcx.typeck_results.borrow_mut();

            // All valid indexing looks like this; we may encounter invalid indexes at this point.
            let base_ty = typeck_results
                .expr_ty_adjusted_opt(base)
                .map(|t| self.fcx.resolve_vars_if_possible(t).kind());
            if base_ty.is_none() {
                // When encountering `return [0][0]` outside of a `fn` body we can encounter a base
                // that isn't in the type table. We assume more relevant errors have already been
                // emitted, so we delay an ICE if none have. (#64638)
                self.tcx().sess.delay_span_bug(e.span, &format!("bad base: `{:?}`", base));
            }
            if let Some(ty::Ref(_, base_ty, _)) = base_ty {
                let index_ty = typeck_results.expr_ty_adjusted_opt(index).unwrap_or_else(|| {
                    // When encountering `return [0][0]` outside of a `fn` body we would attempt
                    // to access a nonexistent index. We assume that more relevant errors will
                    // already have been emitted, so we only gate on this with an ICE if no
                    // error has been emitted. (#64638)
                    self.fcx.tcx.ty_error_with_message(
                        e.span,
                        &format!("bad index {:?} for base: `{:?}`", index, base),
                    )
                });
                let index_ty = self.fcx.resolve_vars_if_possible(index_ty);

                if base_ty.builtin_index().is_some() && index_ty == self.fcx.tcx.types.usize {
                    // Remove the method call record
                    typeck_results.type_dependent_defs_mut().remove(e.hir_id);
                    typeck_results.node_substs_mut().remove(e.hir_id);

                    if let Some(a) = typeck_results.adjustments_mut().get_mut(base.hir_id) {
                        // Discard the need for a mutable borrow

                        // Extra adjustment made when indexing causes a drop
                        // of size information - we need to get rid of it
                        // Since this is "after" the other adjustment to be
                        // discarded, we do an extra `pop()`
                        if let Some(Adjustment {
                            kind: Adjust::Pointer(PointerCast::Unsize), ..
                        }) = a.pop()
                        {
                            // So the borrow discard actually happens here
                            a.pop();
                        }
                    }
                }
            }
        }
    }
}

///////////////////////////////////////////////////////////////////////////
// Impl of Visitor for WritebackCx
//
// This is the master code which walks the HIR. It delegates most of
// the heavy lifting to the generic visit and resolve functions
// below. In general, a function is made into a `visitor` if it must
// traffic in node ids or update typeck results in the type context etc.

impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> {
    fn visit_expr(&mut self, e: &'tcx hir::Expr<'tcx>) {
        self.fix_scalar_builtin_expr(e);
        self.fix_index_builtin_expr(e);

        self.visit_node_id(e.span, e.hir_id);

        match e.kind {
            hir::ExprKind::Closure(_, _, body, _, _) => {
                let body = self.fcx.tcx.hir().body(body);
                for param in body.params {
                    self.visit_node_id(e.span, param.hir_id);
                }

                self.visit_body(body);
            }
            hir::ExprKind::Struct(_, fields, _) => {
                for field in fields {
                    self.visit_field_id(field.hir_id);
                }
            }
            hir::ExprKind::Field(..) => {
                self.visit_field_id(e.hir_id);
            }
            hir::ExprKind::ConstBlock(anon_const) => {
                self.visit_node_id(e.span, anon_const.hir_id);

                let body = self.tcx().hir().body(anon_const.body);
                self.visit_body(body);
            }
            _ => {}
        }

        intravisit::walk_expr(self, e);
    }

    fn visit_block(&mut self, b: &'tcx hir::Block<'tcx>) {
        self.visit_node_id(b.span, b.hir_id);
        intravisit::walk_block(self, b);
    }

    fn visit_pat(&mut self, p: &'tcx hir::Pat<'tcx>) {
        match p.kind {
            hir::PatKind::Binding(..) => {
                let typeck_results = self.fcx.typeck_results.borrow();
                if let Some(bm) =
                    typeck_results.extract_binding_mode(self.tcx().sess, p.hir_id, p.span)
                {
                    self.typeck_results.pat_binding_modes_mut().insert(p.hir_id, bm);
                }
            }
            hir::PatKind::Struct(_, fields, _) => {
                for field in fields {
                    self.visit_field_id(field.hir_id);
                }
            }
            _ => {}
        };

        self.visit_pat_adjustments(p.span, p.hir_id);

        self.visit_node_id(p.span, p.hir_id);
        intravisit::walk_pat(self, p);
    }

    fn visit_local(&mut self, l: &'tcx hir::Local<'tcx>) {
        intravisit::walk_local(self, l);
        let var_ty = self.fcx.local_ty(l.span, l.hir_id).decl_ty;
        let var_ty = self.resolve(var_ty, &l.span);
        self.write_ty_to_typeck_results(l.hir_id, var_ty);
    }

    fn visit_ty(&mut self, hir_ty: &'tcx hir::Ty<'tcx>) {
        intravisit::walk_ty(self, hir_ty);
        let ty = self.fcx.node_ty(hir_ty.hir_id);
        let ty = self.resolve(ty, &hir_ty.span);
        self.write_ty_to_typeck_results(hir_ty.hir_id, ty);
    }

    fn visit_infer(&mut self, inf: &'tcx hir::InferArg) {
        intravisit::walk_inf(self, inf);
        // Ignore cases where the inference is a const.
        if let Some(ty) = self.fcx.node_ty_opt(inf.hir_id) {
            let ty = self.resolve(ty, &inf.span);
            self.write_ty_to_typeck_results(inf.hir_id, ty);
        }
    }
}

impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
    fn eval_closure_size(&mut self) {
        let mut res: FxHashMap<DefId, ClosureSizeProfileData<'tcx>> = Default::default();
        for (closure_def_id, data) in self.fcx.typeck_results.borrow().closure_size_eval.iter() {
            let closure_hir_id =
                self.tcx().hir().local_def_id_to_hir_id(closure_def_id.expect_local());

            let data = self.resolve(*data, &closure_hir_id);

            res.insert(*closure_def_id, data);
        }

        self.typeck_results.closure_size_eval = res;
    }
    fn visit_min_capture_map(&mut self) {
        let mut min_captures_wb = ty::MinCaptureInformationMap::with_capacity_and_hasher(
            self.fcx.typeck_results.borrow().closure_min_captures.len(),
            Default::default(),
        );
        for (closure_def_id, root_min_captures) in
            self.fcx.typeck_results.borrow().closure_min_captures.iter()
        {
            let mut root_var_map_wb = ty::RootVariableMinCaptureList::with_capacity_and_hasher(
                root_min_captures.len(),
                Default::default(),
            );
            for (var_hir_id, min_list) in root_min_captures.iter() {
                let min_list_wb = min_list
                    .iter()
                    .map(|captured_place| {
                        let locatable = captured_place.info.path_expr_id.unwrap_or_else(|| {
                            self.tcx().hir().local_def_id_to_hir_id(closure_def_id.expect_local())
                        });

                        self.resolve(captured_place.clone(), &locatable)
                    })
                    .collect();
                root_var_map_wb.insert(*var_hir_id, min_list_wb);
            }
            min_captures_wb.insert(*closure_def_id, root_var_map_wb);
        }

        self.typeck_results.closure_min_captures = min_captures_wb;
    }

    fn visit_fake_reads_map(&mut self) {
        let mut resolved_closure_fake_reads: FxHashMap<
            DefId,
            Vec<(HirPlace<'tcx>, FakeReadCause, hir::HirId)>,
        > = Default::default();
        for (closure_def_id, fake_reads) in
            self.fcx.typeck_results.borrow().closure_fake_reads.iter()
        {
            let mut resolved_fake_reads = Vec::<(HirPlace<'tcx>, FakeReadCause, hir::HirId)>::new();
            for (place, cause, hir_id) in fake_reads.iter() {
                let locatable =
                    self.tcx().hir().local_def_id_to_hir_id(closure_def_id.expect_local());

                let resolved_fake_read = self.resolve(place.clone(), &locatable);
                resolved_fake_reads.push((resolved_fake_read, *cause, *hir_id));
            }
            resolved_closure_fake_reads.insert(*closure_def_id, resolved_fake_reads);
        }
        self.typeck_results.closure_fake_reads = resolved_closure_fake_reads;
    }

    fn visit_closures(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        for (id, origin) in fcx_typeck_results.closure_kind_origins().iter() {
            let hir_id = hir::HirId { owner: common_hir_owner, local_id: *id };
            let place_span = origin.0;
            let place = self.resolve(origin.1.clone(), &place_span);
            self.typeck_results.closure_kind_origins_mut().insert(hir_id, (place_span, place));
        }
    }

    fn visit_coercion_casts(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        let fcx_coercion_casts = fcx_typeck_results.coercion_casts();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);

        for local_id in fcx_coercion_casts {
            self.typeck_results.set_coercion_cast(*local_id);
        }
    }

    fn visit_user_provided_tys(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        let mut errors_buffer = Vec::new();
        for (&local_id, c_ty) in fcx_typeck_results.user_provided_types().iter() {
            let hir_id = hir::HirId { owner: common_hir_owner, local_id };

            if cfg!(debug_assertions) && c_ty.needs_infer() {
                span_bug!(
                    hir_id.to_span(self.fcx.tcx),
                    "writeback: `{:?}` has inference variables",
                    c_ty
                );
            };

            self.typeck_results.user_provided_types_mut().insert(hir_id, *c_ty);

            if let ty::UserType::TypeOf(_, user_substs) = c_ty.value {
                if self.rustc_dump_user_substs {
                    // This is a unit-testing mechanism.
                    let span = self.tcx().hir().span(hir_id);
                    // We need to buffer the errors in order to guarantee a consistent
                    // order when emitting them.
                    let err = self
                        .tcx()
                        .sess
                        .struct_span_err(span, &format!("user substs: {:?}", user_substs));
                    err.buffer(&mut errors_buffer);
                }
            }
        }

        if !errors_buffer.is_empty() {
            errors_buffer.sort_by_key(|diag| diag.span.primary_span());
            for diag in errors_buffer.drain(..) {
                self.tcx().sess.diagnostic().emit_diagnostic(&diag);
            }
        }
    }

    fn visit_user_provided_sigs(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);

        for (&def_id, c_sig) in fcx_typeck_results.user_provided_sigs.iter() {
            if cfg!(debug_assertions) && c_sig.needs_infer() {
                span_bug!(
                    self.fcx.tcx.hir().span_if_local(def_id).unwrap(),
                    "writeback: `{:?}` has inference variables",
                    c_sig
                );
            };

            self.typeck_results.user_provided_sigs.insert(def_id, *c_sig);
        }
    }

    fn visit_generator_interior_types(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        self.typeck_results.generator_interior_types =
            fcx_typeck_results.generator_interior_types.clone();
    }

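    // Hidden types of opaque types (`impl Trait`). Roughly (illustrative only):
    // for `fn f() -> impl Iterator<Item = u8> { 0..10u8 }` the resolved hidden
    // type `Range<u8>` is recorded for the opaque type; for a `type Alias = impl
    // Sized` style alias, only `None` is recorded here and the concrete type is
    // determined elsewhere.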
    #[instrument(skip(self), level = "debug")]
    fn visit_opaque_types(&mut self) {
        let opaque_types =
            self.fcx.infcx.inner.borrow_mut().opaque_type_storage.take_opaque_types();
        for (opaque_type_key, decl) in opaque_types {
            let hidden_type = match decl.origin {
                hir::OpaqueTyOrigin::FnReturn(_) | hir::OpaqueTyOrigin::AsyncFn(_) => {
                    Some(self.resolve(decl.hidden_type.ty, &decl.hidden_type.span))
                }
                hir::OpaqueTyOrigin::TyAlias => None,
            };
            self.typeck_results.concrete_opaque_types.insert(opaque_type_key.def_id, hidden_type);
        }
    }

    fn visit_field_id(&mut self, hir_id: hir::HirId) {
        if let Some(index) = self.fcx.typeck_results.borrow_mut().field_indices_mut().remove(hir_id)
        {
            self.typeck_results.field_indices_mut().insert(hir_id, index);
        }
    }

    #[instrument(skip(self, span), level = "debug")]
    fn visit_node_id(&mut self, span: Span, hir_id: hir::HirId) {
        // Export associated path extensions and method resolutions.
        if let Some(def) =
            self.fcx.typeck_results.borrow_mut().type_dependent_defs_mut().remove(hir_id)
        {
            self.typeck_results.type_dependent_defs_mut().insert(hir_id, def);
        }

        // Resolve any borrowings for the node with id `hir_id`
        self.visit_adjustments(span, hir_id);

        // Resolve the type of the node with id `hir_id`
        let n_ty = self.fcx.node_ty(hir_id);
        let n_ty = self.resolve(n_ty, &span);
        self.write_ty_to_typeck_results(hir_id, n_ty);
        debug!(?n_ty);

        // Resolve any substitutions
        if let Some(substs) = self.fcx.typeck_results.borrow().node_substs_opt(hir_id) {
            let substs = self.resolve(substs, &span);
            debug!("write_substs_to_tcx({:?}, {:?})", hir_id, substs);
            assert!(!substs.needs_infer() && !substs.has_placeholders());
            self.typeck_results.node_substs_mut().insert(hir_id, substs);
        }
    }

    #[instrument(skip(self, span), level = "debug")]
    fn visit_adjustments(&mut self, span: Span, hir_id: hir::HirId) {
        let adjustment = self.fcx.typeck_results.borrow_mut().adjustments_mut().remove(hir_id);
        match adjustment {
            None => {
                debug!("no adjustments for node");
            }

            Some(adjustment) => {
                let resolved_adjustment = self.resolve(adjustment, &span);
                debug!(?resolved_adjustment);
                self.typeck_results.adjustments_mut().insert(hir_id, resolved_adjustment);
            }
        }
    }

    #[instrument(skip(self, span), level = "debug")]
    fn visit_pat_adjustments(&mut self, span: Span, hir_id: hir::HirId) {
        let adjustment = self.fcx.typeck_results.borrow_mut().pat_adjustments_mut().remove(hir_id);
        match adjustment {
            None => {
                debug!("no pat_adjustments for node");
            }

            Some(adjustment) => {
                let resolved_adjustment = self.resolve(adjustment, &span);
                debug!(?resolved_adjustment);
                self.typeck_results.pat_adjustments_mut().insert(hir_id, resolved_adjustment);
            }
        }
    }

    fn visit_liberated_fn_sigs(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        for (&local_id, &fn_sig) in fcx_typeck_results.liberated_fn_sigs().iter() {
            let hir_id = hir::HirId { owner: common_hir_owner, local_id };
            let fn_sig = self.resolve(fn_sig, &hir_id);
            self.typeck_results.liberated_fn_sigs_mut().insert(hir_id, fn_sig);
        }
    }

    fn visit_fru_field_types(&mut self) {
        let fcx_typeck_results = self.fcx.typeck_results.borrow();
        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
        let common_hir_owner = fcx_typeck_results.hir_owner;

        for (&local_id, ftys) in fcx_typeck_results.fru_field_types().iter() {
            let hir_id = hir::HirId { owner: common_hir_owner, local_id };
            let ftys = self.resolve(ftys.clone(), &hir_id);
            self.typeck_results.fru_field_types_mut().insert(hir_id, ftys);
        }
    }

    fn resolve<T>(&mut self, x: T, span: &dyn Locatable) -> T
    where
        T: TypeFoldable<'tcx>,
    {
        let mut resolver = Resolver::new(self.fcx, span, self.body);
        let x = x.fold_with(&mut resolver);
        if cfg!(debug_assertions) && x.needs_infer() {
            span_bug!(span.to_span(self.fcx.tcx), "writeback: `{:?}` has inference variables", x);
        }

        // We may have introduced e.g. `ty::Error` if inference failed; make sure
        // to mark the `TypeckResults` as tainted in that case, so that downstream
        // users of the typeck results don't produce extra errors, or worse, ICEs.
        if resolver.replaced_with_error {
            // FIXME(eddyb) keep track of `ErrorReported` from where the error was emitted.
            self.typeck_results.tainted_by_errors = Some(ErrorReported);
        }

        x
    }
}

crate trait Locatable {
    fn to_span(&self, tcx: TyCtxt<'_>) -> Span;
}

impl Locatable for Span {
    fn to_span(&self, _: TyCtxt<'_>) -> Span {
        *self
    }
}

impl Locatable for hir::HirId {
    fn to_span(&self, tcx: TyCtxt<'_>) -> Span {
        tcx.hir().span(*self)
    }
}

/// The Resolver. This is the type folding engine that detects unresolved types
/// and constants and replaces them with error types/consts after reporting an
/// inference-failure error.
struct Resolver<'cx, 'tcx> {
    tcx: TyCtxt<'tcx>,
    infcx: &'cx InferCtxt<'cx, 'tcx>,
    span: &'cx dyn Locatable,
    body: &'tcx hir::Body<'tcx>,

    /// Set to `true` if any `Ty` or `ty::Const` had to be replaced with an `Error`.
    replaced_with_error: bool,
}

impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
    fn new(
        fcx: &'cx FnCtxt<'cx, 'tcx>,
        span: &'cx dyn Locatable,
        body: &'tcx hir::Body<'tcx>,
    ) -> Resolver<'cx, 'tcx> {
        Resolver { tcx: fcx.tcx, infcx: fcx, span, body, replaced_with_error: false }
    }

    fn report_type_error(&self, t: Ty<'tcx>) {
        if !self.tcx.sess.has_errors() {
            self.infcx
                .emit_inference_failure_err(
                    Some(self.body.id()),
                    self.span.to_span(self.tcx),
                    t.into(),
                    vec![],
                    E0282,
                )
                .emit();
        }
    }

    fn report_const_error(&self, c: &'tcx ty::Const<'tcx>) {
        if !self.tcx.sess.has_errors() {
            self.infcx
                .emit_inference_failure_err(
                    Some(self.body.id()),
                    self.span.to_span(self.tcx),
                    c.into(),
                    vec![],
                    E0282,
                )
                .emit();
        }
    }
}

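/// Erases every region except late-bound ones. Roughly (illustrative only):
/// `for<'a> fn(&'a u8)` keeps its `for<'a>` binder, while a type such as
/// `&'static u8` becomes `&'erased u8`, since regions are recomputed later by
/// borrowck and only the higher-ranked lifetime names written by the user need
/// to survive writeback.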
struct EraseEarlyRegions<'tcx> {
    tcx: TyCtxt<'tcx>,
}

impl<'tcx> TypeFolder<'tcx> for EraseEarlyRegions<'tcx> {
    fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
        self.tcx
    }
    fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
        if ty.has_type_flags(ty::TypeFlags::HAS_FREE_REGIONS) {
            ty.super_fold_with(self)
        } else {
            ty
        }
    }
    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
        if let ty::ReLateBound(..) = r { r } else { self.tcx.lifetimes.re_erased }
    }
}

impl<'cx, 'tcx> TypeFolder<'tcx> for Resolver<'cx, 'tcx> {
    fn tcx<'a>(&'a self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
        match self.infcx.fully_resolve(t) {
            Ok(t) => {
                // Do not anonymize late-bound regions
                // (e.g. keep `for<'a>` named `for<'a>`).
                // This allows NLL to generate error messages that
                // refer to the higher-ranked lifetime names written by the user.
                EraseEarlyRegions { tcx: self.infcx.tcx }.fold_ty(t)
            }
            Err(_) => {
                debug!("Resolver::fold_ty: input type `{:?}` not fully resolvable", t);
                self.report_type_error(t);
                self.replaced_with_error = true;
                self.tcx().ty_error()
            }
        }
    }

    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
        debug_assert!(!r.is_late_bound(), "Should not be resolving bound region.");
        self.tcx.lifetimes.re_erased
    }

    fn fold_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> {
        match self.infcx.fully_resolve(ct) {
            Ok(ct) => self.infcx.tcx.erase_regions(ct),
            Err(_) => {
                debug!("Resolver::fold_const: input const `{:?}` not fully resolvable", ct);
                self.report_const_error(ct);
                self.replaced_with_error = true;
                self.tcx().const_error(ct.ty)
            }
        }
    }
}

///////////////////////////////////////////////////////////////////////////
// During type check, we store promises with the result of trait
// lookup rather than the actual results (because the results are not
// necessarily available immediately). These routines unwind the
// promises. It is expected that we will have already reported any
// errors that may be encountered, so if the promises store an error,
// a dummy result is returned.