1 // Type resolution: the phase that finds all the types in the HIR with
2 // unresolved type variables and replaces `ty::Infer` types with their
3 // substitutions.
4
5 use crate::check::FnCtxt;
6
7 use rustc_data_structures::stable_map::FxHashMap;
8 use rustc_errors::ErrorReported;
9 use rustc_hir as hir;
10 use rustc_hir::def_id::DefId;
11 use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
12 use rustc_infer::infer::error_reporting::TypeAnnotationNeeded::E0282;
13 use rustc_infer::infer::InferCtxt;
14 use rustc_middle::hir::place::Place as HirPlace;
15 use rustc_middle::mir::FakeReadCause;
16 use rustc_middle::ty::adjustment::{Adjust, Adjustment, PointerCast};
17 use rustc_middle::ty::fold::{TypeFoldable, TypeFolder};
18 use rustc_middle::ty::{self, ClosureSizeProfileData, Ty, TyCtxt};
19 use rustc_span::symbol::sym;
20 use rustc_span::Span;
21 use rustc_trait_selection::opaque_types::InferCtxtExt;
22
23 use std::mem;
24
25 ///////////////////////////////////////////////////////////////////////////
26 // Entry point
27
28 // During type inference, partially inferred types are
29 // represented using type variables (`ty::Infer`). These don't appear in
30 // the final `TypeckResults` since all of the types should have been
31 // inferred once typeck is done.
32 // While type inference is running, however, updating the typeck
33 // results every time a new type is inferred would be unreasonably slow,
34 // so instead all of the replacement happens at the end in
35 // `resolve_type_vars_in_body`, which creates a new `TypeckResults` that
36 // doesn't contain any inference variables.
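//
// For example (an illustrative sketch, not a specific test case):
// ```rust
// let mut v = Vec::new(); // during inference: `v: Vec<?0>`
// v.push(1u8);            // `?0` is unified with `u8`
// ```
// Writeback records the fully resolved `Vec<u8>` for `v` in the new
// `TypeckResults`, with no `ty::Infer` variables left behind.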
37 impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
38     pub fn resolve_type_vars_in_body(
39         &self,
40         body: &'tcx hir::Body<'tcx>,
41     ) -> &'tcx ty::TypeckResults<'tcx> {
42         let item_id = self.tcx.hir().body_owner(body.id());
43         let item_def_id = self.tcx.hir().local_def_id(item_id);
44
45         // This attribute causes us to dump some writeback information
46         // in the form of errors, which is useful for unit tests.
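        //
        // For example, a unit test might look roughly like this (an illustrative
        // sketch, not a specific test file):
        // ```rust
        // #[rustc_dump_user_substs]
        // fn foo() { let _ = Vec::<u8>::new(); }
        // ```
        // `visit_user_provided_tys` below then reports the user-written substs
        // as buffered errors that the test harness can match against.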
47         let rustc_dump_user_substs =
48             self.tcx.has_attr(item_def_id.to_def_id(), sym::rustc_dump_user_substs);
49
50         let mut wbcx = WritebackCx::new(self, body, rustc_dump_user_substs);
51         for param in body.params {
52             wbcx.visit_node_id(param.pat.span, param.hir_id);
53         }
54         // Type only exists for constants and statics, not functions.
55         match self.tcx.hir().body_owner_kind(item_id) {
56             hir::BodyOwnerKind::Const | hir::BodyOwnerKind::Static(_) => {
57                 wbcx.visit_node_id(body.value.span, item_id);
58             }
59             hir::BodyOwnerKind::Closure | hir::BodyOwnerKind::Fn => (),
60         }
61         wbcx.visit_body(body);
62         wbcx.visit_min_capture_map();
63         wbcx.eval_closure_size();
64         wbcx.visit_fake_reads_map();
65         wbcx.visit_closures();
66         wbcx.visit_liberated_fn_sigs();
67         wbcx.visit_fru_field_types();
68         wbcx.visit_opaque_types(body.value.span);
69         wbcx.visit_coercion_casts();
70         wbcx.visit_user_provided_tys();
71         wbcx.visit_user_provided_sigs();
72         wbcx.visit_generator_interior_types();
73
74         let used_trait_imports =
75             mem::take(&mut self.typeck_results.borrow_mut().used_trait_imports);
76         debug!("used_trait_imports({:?}) = {:?}", item_def_id, used_trait_imports);
77         wbcx.typeck_results.used_trait_imports = used_trait_imports;
78
79         wbcx.typeck_results.treat_byte_string_as_slice =
80             mem::take(&mut self.typeck_results.borrow_mut().treat_byte_string_as_slice);
81
82         if self.is_tainted_by_errors() {
83             // FIXME(eddyb) keep track of `ErrorReported` from where the error was emitted.
84             wbcx.typeck_results.tainted_by_errors = Some(ErrorReported);
85         }
86
87         debug!("writeback: typeck results for {:?} are {:#?}", item_def_id, wbcx.typeck_results);
88
89         self.tcx.arena.alloc(wbcx.typeck_results)
90     }
91 }
92
93 ///////////////////////////////////////////////////////////////////////////
94 // The Writeback context. This visitor walks the HIR, checking the
95 // fn-specific typeck results to find references to types or regions. It
96 // resolves those to remove inference variables and writes the
97 // final result back into the master typeck results in the tcx. Here and
98 // there, it applies a few ad-hoc checks that were not convenient to
99 // do elsewhere.
100
101 struct WritebackCx<'cx, 'tcx> {
102     fcx: &'cx FnCtxt<'cx, 'tcx>,
103
104     typeck_results: ty::TypeckResults<'tcx>,
105
106     body: &'tcx hir::Body<'tcx>,
107
108     rustc_dump_user_substs: bool,
109 }
110
111 impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
112     fn new(
113         fcx: &'cx FnCtxt<'cx, 'tcx>,
114         body: &'tcx hir::Body<'tcx>,
115         rustc_dump_user_substs: bool,
116     ) -> WritebackCx<'cx, 'tcx> {
117         let owner = body.id().hir_id.owner;
118
119         WritebackCx {
120             fcx,
121             typeck_results: ty::TypeckResults::new(owner),
122             body,
123             rustc_dump_user_substs,
124         }
125     }
126
127     fn tcx(&self) -> TyCtxt<'tcx> {
128         self.fcx.tcx
129     }
130
131     fn write_ty_to_typeck_results(&mut self, hir_id: hir::HirId, ty: Ty<'tcx>) {
132         debug!("write_ty_to_typeck_results({:?}, {:?})", hir_id, ty);
133         assert!(!ty.needs_infer() && !ty.has_placeholders() && !ty.has_free_regions(self.tcx()));
134         self.typeck_results.node_types_mut().insert(hir_id, ty);
135     }
136
137     // Hacky hack: During type-checking, we treat *all* operators
138     // as potentially overloaded. But then, during writeback, if
139     // we observe that something like `a+b` is (known to be)
140     // operating on scalars, we clear the overload.
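    //
    // For example (an illustrative sketch):
    // ```rust
    // let eq = 1u32 == 2u32; // type-checked as if it were `PartialEq::eq(&1u32, &2u32)`
    // ```
    // Both operands resolve to scalars, so the method-call record is removed
    // and, for by-reference operators like `==`, the autoref adjustments on
    // the operands are popped as well.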
141     fn fix_scalar_builtin_expr(&mut self, e: &hir::Expr<'_>) {
142         match e.kind {
143             hir::ExprKind::Unary(hir::UnOp::Neg | hir::UnOp::Not, inner) => {
144                 let inner_ty = self.fcx.node_ty(inner.hir_id);
145                 let inner_ty = self.fcx.resolve_vars_if_possible(inner_ty);
146
147                 if inner_ty.is_scalar() {
148                     let mut typeck_results = self.fcx.typeck_results.borrow_mut();
149                     typeck_results.type_dependent_defs_mut().remove(e.hir_id);
150                     typeck_results.node_substs_mut().remove(e.hir_id);
151                 }
152             }
153             hir::ExprKind::Binary(ref op, lhs, rhs) | hir::ExprKind::AssignOp(ref op, lhs, rhs) => {
154                 let lhs_ty = self.fcx.node_ty(lhs.hir_id);
155                 let lhs_ty = self.fcx.resolve_vars_if_possible(lhs_ty);
156
157                 let rhs_ty = self.fcx.node_ty(rhs.hir_id);
158                 let rhs_ty = self.fcx.resolve_vars_if_possible(rhs_ty);
159
160                 if lhs_ty.is_scalar() && rhs_ty.is_scalar() {
161                     let mut typeck_results = self.fcx.typeck_results.borrow_mut();
162                     typeck_results.type_dependent_defs_mut().remove(e.hir_id);
163                     typeck_results.node_substs_mut().remove(e.hir_id);
164
165                     match e.kind {
166                         hir::ExprKind::Binary(..) => {
167                             if !op.node.is_by_value() {
168                                 let mut adjustments = typeck_results.adjustments_mut();
169                                 if let Some(a) = adjustments.get_mut(lhs.hir_id) {
170                                     a.pop();
171                                 }
172                                 if let Some(a) = adjustments.get_mut(rhs.hir_id) {
173                                     a.pop();
174                                 }
175                             }
176                         }
177                         hir::ExprKind::AssignOp(..)
178                             if let Some(a) = typeck_results.adjustments_mut().get_mut(lhs.hir_id) =>
179                         {
180                             a.pop();
181                         }
182                         _ => {}
183                     }
184                 }
185             }
186             _ => {}
187         }
188     }
189
190     // Similar to operators, indexing is always assumed to be overloaded.
191     // Here, we correct cases where an indexing expression can be simplified
192     // to use builtin indexing because the index type is known to be
193     // usize-ish.
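    //
    // For example (an illustrative sketch):
    // ```rust
    // let v = vec![1, 2, 3];
    // let x = v[0]; // type-checked as if it were `*Index::index(&v, 0)`
    // ```
    // Since `v` autoderefs to a `[i32]` slice (which supports builtin indexing)
    // and the index is a `usize`, the `Index::index` record and the extra
    // adjustments on `v` are removed below.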
194     fn fix_index_builtin_expr(&mut self, e: &hir::Expr<'_>) {
195         if let hir::ExprKind::Index(ref base, ref index) = e.kind {
196             let mut typeck_results = self.fcx.typeck_results.borrow_mut();
197
198             // All valid indexing looks like this; we might encounter invalid indexes at this point.
199             let base_ty = typeck_results
200                 .expr_ty_adjusted_opt(base)
201                 .map(|t| self.fcx.resolve_vars_if_possible(t).kind());
202             if base_ty.is_none() {
203                 // When encountering `return [0][0]` outside of a `fn` body we can encounter a base
204                 // that isn't in the type table. We assume more relevant errors have already been
205                 // emitted, so we delay a bug that only ICEs if none have been. (#64638)
206                 self.tcx().sess.delay_span_bug(e.span, &format!("bad base: `{:?}`", base));
207             }
208             if let Some(ty::Ref(_, base_ty, _)) = base_ty {
209                 let index_ty = typeck_results.expr_ty_adjusted_opt(index).unwrap_or_else(|| {
210                     // When encountering `return [0][0]` outside of a `fn` body we would attempt
211                     // to access a nonexistent index. We assume that more relevant errors will
212                     // already have been emitted, so we only gate on this with an ICE if no
213                     // error has been emitted. (#64638)
214                     self.fcx.tcx.ty_error_with_message(
215                         e.span,
216                         &format!("bad index {:?} for base: `{:?}`", index, base),
217                     )
218                 });
219                 let index_ty = self.fcx.resolve_vars_if_possible(index_ty);
220
221                 if base_ty.builtin_index().is_some() && index_ty == self.fcx.tcx.types.usize {
222                     // Remove the method call record
223                     typeck_results.type_dependent_defs_mut().remove(e.hir_id);
224                     typeck_results.node_substs_mut().remove(e.hir_id);
225
226                     if let Some(a) = typeck_results.adjustments_mut().get_mut(base.hir_id) {
227                         // Discard the need for a mutable borrow.
228
229                         // Indexing can add an extra `Unsize` adjustment that drops
230                         // size information; we need to get rid of it.
231                         // Since this comes "after" the other adjustment to be
232                         // discarded, we do an extra `pop()`.
233                         if let Some(Adjustment {
234                             kind: Adjust::Pointer(PointerCast::Unsize), ..
235                         }) = a.pop()
236                         {
237                             // So the borrow discard actually happens here
238                             a.pop();
239                         }
240                     }
241                 }
242             }
243         }
244     }
245 }
246
247 ///////////////////////////////////////////////////////////////////////////
248 // Impl of Visitor for WritebackCx
249 //
250 // This is the master code which walks the HIR. It delegates most of
251 // the heavy lifting to the generic visit and resolve functions
252 // below. In general, a function is made into a `visitor` if it must
253 // traffic in node-ids or update typeck results in the type context etc.
254
255 impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> {
256     type Map = intravisit::ErasedMap<'tcx>;
257
258     fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
259         NestedVisitorMap::None
260     }
261
262     fn visit_expr(&mut self, e: &'tcx hir::Expr<'tcx>) {
263         self.fix_scalar_builtin_expr(e);
264         self.fix_index_builtin_expr(e);
265
266         self.visit_node_id(e.span, e.hir_id);
267
268         match e.kind {
269             hir::ExprKind::Closure(_, _, body, _, _) => {
270                 let body = self.fcx.tcx.hir().body(body);
271                 for param in body.params {
272                     self.visit_node_id(e.span, param.hir_id);
273                 }
274
275                 self.visit_body(body);
276             }
277             hir::ExprKind::Struct(_, fields, _) => {
278                 for field in fields {
279                     self.visit_field_id(field.hir_id);
280                 }
281             }
282             hir::ExprKind::Field(..) => {
283                 self.visit_field_id(e.hir_id);
284             }
285             _ => {}
286         }
287
288         intravisit::walk_expr(self, e);
289     }
290
291     fn visit_block(&mut self, b: &'tcx hir::Block<'tcx>) {
292         self.visit_node_id(b.span, b.hir_id);
293         intravisit::walk_block(self, b);
294     }
295
296     fn visit_pat(&mut self, p: &'tcx hir::Pat<'tcx>) {
297         match p.kind {
298             hir::PatKind::Binding(..) => {
299                 let typeck_results = self.fcx.typeck_results.borrow();
300                 if let Some(bm) =
301                     typeck_results.extract_binding_mode(self.tcx().sess, p.hir_id, p.span)
302                 {
303                     self.typeck_results.pat_binding_modes_mut().insert(p.hir_id, bm);
304                 }
305             }
306             hir::PatKind::Struct(_, fields, _) => {
307                 for field in fields {
308                     self.visit_field_id(field.hir_id);
309                 }
310             }
311             _ => {}
312         };
313
314         self.visit_pat_adjustments(p.span, p.hir_id);
315
316         self.visit_node_id(p.span, p.hir_id);
317         intravisit::walk_pat(self, p);
318     }
319
320     fn visit_local(&mut self, l: &'tcx hir::Local<'tcx>) {
321         intravisit::walk_local(self, l);
322         let var_ty = self.fcx.local_ty(l.span, l.hir_id).decl_ty;
323         let var_ty = self.resolve(var_ty, &l.span);
324         self.write_ty_to_typeck_results(l.hir_id, var_ty);
325     }
326
327     fn visit_ty(&mut self, hir_ty: &'tcx hir::Ty<'tcx>) {
328         intravisit::walk_ty(self, hir_ty);
329         let ty = self.fcx.node_ty(hir_ty.hir_id);
330         let ty = self.resolve(ty, &hir_ty.span);
331         self.write_ty_to_typeck_results(hir_ty.hir_id, ty);
332     }
333
334     fn visit_infer(&mut self, inf: &'tcx hir::InferArg) {
335         intravisit::walk_inf(self, inf);
336         // Ignore cases where the inferred argument is a const.
337         if let Some(ty) = self.fcx.node_ty_opt(inf.hir_id) {
338             let ty = self.resolve(ty, &inf.span);
339             self.write_ty_to_typeck_results(inf.hir_id, ty);
340         }
341     }
342 }
343
344 impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
345     fn eval_closure_size(&mut self) {
346         let mut res: FxHashMap<DefId, ClosureSizeProfileData<'tcx>> = Default::default();
347         for (closure_def_id, data) in self.fcx.typeck_results.borrow().closure_size_eval.iter() {
348             let closure_hir_id =
349                 self.tcx().hir().local_def_id_to_hir_id(closure_def_id.expect_local());
350
351             let data = self.resolve(*data, &closure_hir_id);
352
353             res.insert(*closure_def_id, data);
354         }
355
356         self.typeck_results.closure_size_eval = res;
357     }
358     fn visit_min_capture_map(&mut self) {
359         let mut min_captures_wb = ty::MinCaptureInformationMap::with_capacity_and_hasher(
360             self.fcx.typeck_results.borrow().closure_min_captures.len(),
361             Default::default(),
362         );
363         for (closure_def_id, root_min_captures) in
364             self.fcx.typeck_results.borrow().closure_min_captures.iter()
365         {
366             let mut root_var_map_wb = ty::RootVariableMinCaptureList::with_capacity_and_hasher(
367                 root_min_captures.len(),
368                 Default::default(),
369             );
370             for (var_hir_id, min_list) in root_min_captures.iter() {
371                 let min_list_wb = min_list
372                     .iter()
373                     .map(|captured_place| {
374                         let locatable = captured_place.info.path_expr_id.unwrap_or_else(|| {
375                             self.tcx().hir().local_def_id_to_hir_id(closure_def_id.expect_local())
376                         });
377
378                         self.resolve(captured_place.clone(), &locatable)
379                     })
380                     .collect();
381                 root_var_map_wb.insert(*var_hir_id, min_list_wb);
382             }
383             min_captures_wb.insert(*closure_def_id, root_var_map_wb);
384         }
385
386         self.typeck_results.closure_min_captures = min_captures_wb;
387     }
388
389     fn visit_fake_reads_map(&mut self) {
390         let mut resolved_closure_fake_reads: FxHashMap<
391             DefId,
392             Vec<(HirPlace<'tcx>, FakeReadCause, hir::HirId)>,
393         > = Default::default();
394         for (closure_def_id, fake_reads) in
395             self.fcx.typeck_results.borrow().closure_fake_reads.iter()
396         {
397             let mut resolved_fake_reads = Vec::<(HirPlace<'tcx>, FakeReadCause, hir::HirId)>::new();
398             for (place, cause, hir_id) in fake_reads.iter() {
399                 let locatable =
400                     self.tcx().hir().local_def_id_to_hir_id(closure_def_id.expect_local());
401
402                 let resolved_fake_read = self.resolve(place.clone(), &locatable);
403                 resolved_fake_reads.push((resolved_fake_read, *cause, *hir_id));
404             }
405             resolved_closure_fake_reads.insert(*closure_def_id, resolved_fake_reads);
406         }
407         self.typeck_results.closure_fake_reads = resolved_closure_fake_reads;
408     }
409
410     fn visit_closures(&mut self) {
411         let fcx_typeck_results = self.fcx.typeck_results.borrow();
412         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
413         let common_hir_owner = fcx_typeck_results.hir_owner;
414
415         for (id, origin) in fcx_typeck_results.closure_kind_origins().iter() {
416             let hir_id = hir::HirId { owner: common_hir_owner, local_id: *id };
417             let place_span = origin.0;
418             let place = self.resolve(origin.1.clone(), &place_span);
419             self.typeck_results.closure_kind_origins_mut().insert(hir_id, (place_span, place));
420         }
421     }
422
423     fn visit_coercion_casts(&mut self) {
424         let fcx_typeck_results = self.fcx.typeck_results.borrow();
425         let fcx_coercion_casts = fcx_typeck_results.coercion_casts();
426         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
427
428         for local_id in fcx_coercion_casts {
429             self.typeck_results.set_coercion_cast(*local_id);
430         }
431     }
432
433     fn visit_user_provided_tys(&mut self) {
434         let fcx_typeck_results = self.fcx.typeck_results.borrow();
435         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
436         let common_hir_owner = fcx_typeck_results.hir_owner;
437
438         let mut errors_buffer = Vec::new();
439         for (&local_id, c_ty) in fcx_typeck_results.user_provided_types().iter() {
440             let hir_id = hir::HirId { owner: common_hir_owner, local_id };
441
442             if cfg!(debug_assertions) && c_ty.needs_infer() {
443                 span_bug!(
444                     hir_id.to_span(self.fcx.tcx),
445                     "writeback: `{:?}` has inference variables",
446                     c_ty
447                 );
448             };
449
450             self.typeck_results.user_provided_types_mut().insert(hir_id, *c_ty);
451
452             if let ty::UserType::TypeOf(_, user_substs) = c_ty.value {
453                 if self.rustc_dump_user_substs {
454                     // This is a unit-testing mechanism.
455                     let span = self.tcx().hir().span(hir_id);
456                     // We need to buffer the errors in order to guarantee a consistent
457                     // order when emitting them.
458                     let err = self
459                         .tcx()
460                         .sess
461                         .struct_span_err(span, &format!("user substs: {:?}", user_substs));
462                     err.buffer(&mut errors_buffer);
463                 }
464             }
465         }
466
467         if !errors_buffer.is_empty() {
468             errors_buffer.sort_by_key(|diag| diag.span.primary_span());
469             for diag in errors_buffer.drain(..) {
470                 self.tcx().sess.diagnostic().emit_diagnostic(&diag);
471             }
472         }
473     }
474
475     fn visit_user_provided_sigs(&mut self) {
476         let fcx_typeck_results = self.fcx.typeck_results.borrow();
477         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
478
479         for (&def_id, c_sig) in fcx_typeck_results.user_provided_sigs.iter() {
480             if cfg!(debug_assertions) && c_sig.needs_infer() {
481                 span_bug!(
482                     self.fcx.tcx.hir().span_if_local(def_id).unwrap(),
483                     "writeback: `{:?}` has inference variables",
484                     c_sig
485                 );
486             };
487
488             self.typeck_results.user_provided_sigs.insert(def_id, *c_sig);
489         }
490     }
491
492     fn visit_generator_interior_types(&mut self) {
493         let fcx_typeck_results = self.fcx.typeck_results.borrow();
494         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
495         self.typeck_results.generator_interior_types =
496             fcx_typeck_results.generator_interior_types.clone();
497     }
498
499     #[instrument(skip(self, span), level = "debug")]
500     fn visit_opaque_types(&mut self, span: Span) {
501         let opaque_types = self.fcx.infcx.inner.borrow().opaque_types.clone();
502         for (opaque_type_key, opaque_defn) in opaque_types {
503             let hir_id =
504                 self.tcx().hir().local_def_id_to_hir_id(opaque_type_key.def_id.expect_local());
505             let instantiated_ty = self.resolve(opaque_defn.concrete_ty, &hir_id);
506
507             debug_assert!(!instantiated_ty.has_escaping_bound_vars());
508
509             let opaque_type_key = self.fcx.fully_resolve(opaque_type_key).unwrap();
510
511             // Prevent:
512             // * `fn foo<T>() -> Foo<T>`
513             // * `fn foo<T: Bound + Other>() -> Foo<T>`
514             // from being defining.
515
516             // Also replace all generic params with the ones from the opaque type
517             // definition so that
518             // ```rust
519             // type Foo<T> = impl Baz + 'static;
520             // fn foo<U>() -> Foo<U> { .. }
521             // ```
522             // figures out the concrete type with `U`, but the stored type is with `T`.
523
524             // FIXME: why are we calling this here? This seems too early, and duplicated.
525             let definition_ty = self.fcx.infer_opaque_definition_from_instantiation(
526                 opaque_type_key,
527                 instantiated_ty,
528                 span,
529             );
530
531             let mut skip_add = false;
532
533             if let ty::Opaque(definition_ty_def_id, _substs) = *definition_ty.kind() {
534                 if opaque_defn.origin == hir::OpaqueTyOrigin::TyAlias {
535                     if opaque_type_key.def_id == definition_ty_def_id {
536                         debug!(
537                             "skipping adding concrete definition for opaque type {:?} {:?}",
538                             opaque_defn, opaque_type_key.def_id
539                         );
540                         skip_add = true;
541                     }
542                 }
543             }
544
545             if opaque_type_key.substs.needs_infer() {
546                 span_bug!(span, "{:#?} has inference variables", opaque_type_key.substs)
547             }
548
549             // We only want to add an entry into `concrete_opaque_types`
550             // if we actually found a defining usage of this opaque type.
551             // Otherwise, we do nothing - we'll either find a defining usage
552             // in some other location, or we'll end up emitting an error due
553             // to the lack of defining usage
554             if !skip_add {
555                 self.typeck_results.concrete_opaque_types.insert(opaque_type_key.def_id);
556             }
557         }
558     }
559
560     fn visit_field_id(&mut self, hir_id: hir::HirId) {
561         if let Some(index) = self.fcx.typeck_results.borrow_mut().field_indices_mut().remove(hir_id)
562         {
563             self.typeck_results.field_indices_mut().insert(hir_id, index);
564         }
565     }
566
567     #[instrument(skip(self, span), level = "debug")]
568     fn visit_node_id(&mut self, span: Span, hir_id: hir::HirId) {
569         // Export associated path extensions and method resolutions.
570         if let Some(def) =
571             self.fcx.typeck_results.borrow_mut().type_dependent_defs_mut().remove(hir_id)
572         {
573             self.typeck_results.type_dependent_defs_mut().insert(hir_id, def);
574         }
575
576         // Resolve any borrowings for the node with id `node_id`
577         self.visit_adjustments(span, hir_id);
578
579         // Resolve the type of the node with id `node_id`
580         let n_ty = self.fcx.node_ty(hir_id);
581         let n_ty = self.resolve(n_ty, &span);
582         self.write_ty_to_typeck_results(hir_id, n_ty);
583         debug!(?n_ty);
584
585         // Resolve any substitutions
586         if let Some(substs) = self.fcx.typeck_results.borrow().node_substs_opt(hir_id) {
587             let substs = self.resolve(substs, &span);
588             debug!("write_substs_to_tcx({:?}, {:?})", hir_id, substs);
589             assert!(!substs.needs_infer() && !substs.has_placeholders());
590             self.typeck_results.node_substs_mut().insert(hir_id, substs);
591         }
592     }
593
594     #[instrument(skip(self, span), level = "debug")]
595     fn visit_adjustments(&mut self, span: Span, hir_id: hir::HirId) {
596         let adjustment = self.fcx.typeck_results.borrow_mut().adjustments_mut().remove(hir_id);
597         match adjustment {
598             None => {
599                 debug!("no adjustments for node");
600             }
601
602             Some(adjustment) => {
603                 let resolved_adjustment = self.resolve(adjustment, &span);
604                 debug!(?resolved_adjustment);
605                 self.typeck_results.adjustments_mut().insert(hir_id, resolved_adjustment);
606             }
607         }
608     }
609
610     #[instrument(skip(self, span), level = "debug")]
611     fn visit_pat_adjustments(&mut self, span: Span, hir_id: hir::HirId) {
612         let adjustment = self.fcx.typeck_results.borrow_mut().pat_adjustments_mut().remove(hir_id);
613         match adjustment {
614             None => {
615                 debug!("no pat_adjustments for node");
616             }
617
618             Some(adjustment) => {
619                 let resolved_adjustment = self.resolve(adjustment, &span);
620                 debug!(?resolved_adjustment);
621                 self.typeck_results.pat_adjustments_mut().insert(hir_id, resolved_adjustment);
622             }
623         }
624     }
625
626     fn visit_liberated_fn_sigs(&mut self) {
627         let fcx_typeck_results = self.fcx.typeck_results.borrow();
628         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
629         let common_hir_owner = fcx_typeck_results.hir_owner;
630
631         for (&local_id, &fn_sig) in fcx_typeck_results.liberated_fn_sigs().iter() {
632             let hir_id = hir::HirId { owner: common_hir_owner, local_id };
633             let fn_sig = self.resolve(fn_sig, &hir_id);
634             self.typeck_results.liberated_fn_sigs_mut().insert(hir_id, fn_sig);
635         }
636     }
637
638     fn visit_fru_field_types(&mut self) {
639         let fcx_typeck_results = self.fcx.typeck_results.borrow();
640         assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
641         let common_hir_owner = fcx_typeck_results.hir_owner;
642
643         for (&local_id, ftys) in fcx_typeck_results.fru_field_types().iter() {
644             let hir_id = hir::HirId { owner: common_hir_owner, local_id };
645             let ftys = self.resolve(ftys.clone(), &hir_id);
646             self.typeck_results.fru_field_types_mut().insert(hir_id, ftys);
647         }
648     }
649
650     fn resolve<T>(&mut self, x: T, span: &dyn Locatable) -> T
651     where
652         T: TypeFoldable<'tcx>,
653     {
654         let mut resolver = Resolver::new(self.fcx, span, self.body);
655         let x = x.fold_with(&mut resolver);
656         if cfg!(debug_assertions) && x.needs_infer() {
657             span_bug!(span.to_span(self.fcx.tcx), "writeback: `{:?}` has inference variables", x);
658         }
659
660         // We may have introduced e.g. `ty::Error` if inference failed; make sure
661         // to mark the `TypeckResults` as tainted in that case, so that downstream
662         // users of the typeck results don't produce extra errors, or worse, ICEs.
663         if resolver.replaced_with_error {
664             // FIXME(eddyb) keep track of `ErrorReported` from where the error was emitted.
665             self.typeck_results.tainted_by_errors = Some(ErrorReported);
666         }
667
668         x
669     }
670 }
671
672 crate trait Locatable {
673     fn to_span(&self, tcx: TyCtxt<'_>) -> Span;
674 }
675
676 impl Locatable for Span {
677     fn to_span(&self, _: TyCtxt<'_>) -> Span {
678         *self
679     }
680 }
681
682 impl Locatable for hir::HirId {
683     fn to_span(&self, tcx: TyCtxt<'_>) -> Span {
684         tcx.hir().span(*self)
685     }
686 }
687
688 /// The Resolver. This is the type folding engine that detects
689 /// unresolved types and consts and replaces them with errors.
690 struct Resolver<'cx, 'tcx> {
691     tcx: TyCtxt<'tcx>,
692     infcx: &'cx InferCtxt<'cx, 'tcx>,
693     span: &'cx dyn Locatable,
694     body: &'tcx hir::Body<'tcx>,
695
696     /// Set to `true` if any `Ty` or `ty::Const` had to be replaced with an `Error`.
697     replaced_with_error: bool,
698 }
699
700 impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
701     fn new(
702         fcx: &'cx FnCtxt<'cx, 'tcx>,
703         span: &'cx dyn Locatable,
704         body: &'tcx hir::Body<'tcx>,
705     ) -> Resolver<'cx, 'tcx> {
706         Resolver { tcx: fcx.tcx, infcx: fcx, span, body, replaced_with_error: false }
707     }
708
709     fn report_type_error(&self, t: Ty<'tcx>) {
710         if !self.tcx.sess.has_errors() {
711             self.infcx
712                 .emit_inference_failure_err(
713                     Some(self.body.id()),
714                     self.span.to_span(self.tcx),
715                     t.into(),
716                     vec![],
717                     E0282,
718                 )
719                 .emit();
720         }
721     }
722
723     fn report_const_error(&self, c: &'tcx ty::Const<'tcx>) {
724         if !self.tcx.sess.has_errors() {
725             self.infcx
726                 .emit_inference_failure_err(
727                     Some(self.body.id()),
728                     self.span.to_span(self.tcx),
729                     c.into(),
730                     vec![],
731                     E0282,
732                 )
733                 .emit();
734         }
735     }
736 }
737
738 struct EraseEarlyRegions<'tcx> {
739     tcx: TyCtxt<'tcx>,
740 }
741
742 impl<'tcx> TypeFolder<'tcx> for EraseEarlyRegions<'tcx> {
743     fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
744         self.tcx
745     }
746     fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
747         if ty.has_type_flags(ty::TypeFlags::HAS_POTENTIAL_FREE_REGIONS) {
748             ty.super_fold_with(self)
749         } else {
750             ty
751         }
752     }
753     fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
754         if let ty::ReLateBound(..) = r { r } else { self.tcx.lifetimes.re_erased }
755     }
756 }
757
758 impl<'cx, 'tcx> TypeFolder<'tcx> for Resolver<'cx, 'tcx> {
759     fn tcx<'a>(&'a self) -> TyCtxt<'tcx> {
760         self.tcx
761     }
762
763     fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
764         match self.infcx.fully_resolve(t) {
765             Ok(t) => {
766                 // Do not anonymize late-bound regions
767                 // (e.g. keep `for<'a>` named `for<'a>`).
768                 // This allows NLL to generate error messages that
769                 // refer to the higher-ranked lifetime names written by the user.
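                // For example, `for<'a> fn(&'a u32)` keeps its `for<'a>` binder
                // and the name `'a` intact here, while free/early-bound regions
                // are erased to `'erased` by `EraseEarlyRegions`.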
770                 EraseEarlyRegions { tcx: self.infcx.tcx }.fold_ty(t)
771             }
772             Err(_) => {
773                 debug!("Resolver::fold_ty: input type `{:?}` not fully resolvable", t);
774                 self.report_type_error(t);
775                 self.replaced_with_error = true;
776                 self.tcx().ty_error()
777             }
778         }
779     }
780
781     fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
782         debug_assert!(!r.is_late_bound(), "Should not be resolving bound region.");
783         self.tcx.lifetimes.re_erased
784     }
785
786     fn fold_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> {
787         match self.infcx.fully_resolve(ct) {
788             Ok(ct) => self.infcx.tcx.erase_regions(ct),
789             Err(_) => {
790                 debug!("Resolver::fold_const: input const `{:?}` not fully resolvable", ct);
791                 self.report_const_error(ct);
792                 self.replaced_with_error = true;
793                 self.tcx().const_error(ct.ty)
794             }
795         }
796     }
797 }
798
799 ///////////////////////////////////////////////////////////////////////////
800 // During type check, we store promises with the result of trait
801 // lookup rather than the actual results (because the results are not
802 // necessarily available immediately). These routines unwind the
803 // promises. It is expected that we will have already reported any
804 // errors that may be encountered, so if the promises store an error,
805 // a dummy result is returned.