1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
15 Within the check phase of type check, we check each item one at a time
16 (bodies of function expressions are checked as part of the containing
17 function). Inference is used to supply types wherever they are
20 By far the most complex case is checking the body of a function. This
21 can be broken down into several distinct phases:
23 - gather: creates type variables to represent the type of each local
24 variable and pattern binding.
26 - main: the main pass does the lion's share of the work: it
27 determines the types of all expressions, resolves
28 methods, checks for most invalid conditions, and so forth. In
29 some cases, where a type is unknown, it may create a type or region
30 variable and use that as the type of an expression.
32 In the process of checking, various constraints will be placed on
33 these type variables through the subtyping relationships requested
34 through the `demand` module. The `infer` module is in charge
35 of resolving those constraints.
37 - regionck: after main is complete, the regionck pass goes over all
38 types looking for regions and making sure that they did not escape
39 into places they are not in scope. This may also influence the
40 final assignments of the various region variables if there is some
43 - vtable: find and records the impls to use for each trait bound that
44 appears on a type parameter.
46 - writeback: writes the final types within a function body, replacing
47 type variables with their final inferred types. These final types
48 are written into the `tcx.node_types` table, which should *never* contain
49 any reference to a type variable.
53 While type checking a function, the intermediate types for the
54 expressions, blocks, and so forth contained within the function are
55 stored in `fcx.node_types` and `fcx.item_substs`. These types
56 may contain unresolved type variables. After type checking is
57 complete, the functions in the writeback module are used to take the
58 types from this table, resolve them, and then write them into their
59 permanent home in the type context `ccx.tcx`.
61 This means that during inferencing you should use `fcx.write_ty()`
62 and `fcx.expr_ty()` / `fcx.node_ty()` to write/obtain the types of
63 nodes within the function.
65 The types of top-level items, which never contain unbound type
66 variables, are stored directly into the `tcx` tables.
68 n.b.: A type variable is not the same thing as a type parameter. A
69 type variable is rather an "instance" of a type parameter: that is,
70 given a generic function `fn foo<T>(t: T)`: while checking the
71 function `foo`, the type `ty_param(0)` refers to the type `T`, which
72 is treated in abstract. When `foo()` is called, however, `T` will be
73 substituted for a fresh type variable `N`. This variable will
74 eventually be resolved to some concrete type (which might itself be
79 pub use self::Expectation::*;
80 pub use self::compare_method::{compare_impl_method, compare_const_impl};
81 use self::TupleArgumentsFlag::*;
83 use astconv::{AstConv, ast_region_to_region};
84 use dep_graph::DepNode;
85 use fmt_macros::{Parser, Piece, Position};
86 use hir::def::{Def, CtorKind};
87 use hir::def_id::{DefId, LOCAL_CRATE};
88 use rustc::infer::{self, InferCtxt, InferOk, RegionVariableOrigin, TypeTrace};
89 use rustc::infer::type_variable::{self, TypeVariableOrigin};
90 use rustc::ty::subst::{Kind, Subst, Substs};
91 use rustc::traits::{self, ObligationCause, ObligationCauseCode, Reveal};
92 use rustc::ty::{ParamTy, ParameterEnvironment};
93 use rustc::ty::{LvaluePreference, NoPreference, PreferMutLvalue};
94 use rustc::ty::{self, ToPolyTraitRef, Ty, TyCtxt, Visibility};
95 use rustc::ty::{MethodCall, MethodCallee};
96 use rustc::ty::adjustment;
97 use rustc::ty::fold::{BottomUpFolder, TypeFoldable};
98 use rustc::ty::util::{Representability, IntTypeExt};
99 use require_c_abi_if_variadic;
100 use rscope::{ElisionFailureInfo, RegionScope};
101 use session::{Session, CompileResult};
105 use util::common::{ErrorReported, indenter};
106 use util::nodemap::{DefIdMap, FxHashMap, FxHashSet, NodeMap};
108 use std::cell::{Cell, RefCell};
110 use std::mem::replace;
111 use std::ops::{self, Deref};
112 use syntax::abi::Abi;
115 use syntax::codemap::{self, original_sp, Spanned};
116 use syntax::feature_gate::{GateIssue, emit_feature_err};
118 use syntax::symbol::{Symbol, InternedString, keywords};
119 use syntax::util::lev_distance::find_best_match_for_name;
120 use syntax_pos::{self, BytePos, Span, DUMMY_SP};
122 use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
123 use rustc::hir::itemlikevisit::ItemLikeVisitor;
124 use rustc::hir::{self, PatKind};
125 use rustc::middle::lang_items;
126 use rustc_back::slice;
127 use rustc_const_eval::eval_length;
147 /// closures defined within the function. For example:
150 /// bar(move|| { ... })
153 /// Here, the function `foo()` and the closure passed to
154 /// `bar()` will each have their own `FnCtxt`, but they will
155 /// share the inherited fields.
156 pub struct Inherited<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
// Crate-wide type-checking context shared by all bodies being checked.
157 ccx: &'a CrateCtxt<'a, 'gcx>,
// Inference context owning the type/region variables created for this body.
158 infcx: InferCtxt<'a, 'gcx, 'tcx>,
// Types assigned to local variables and pattern bindings, keyed by node id
// (written by GatherLocalsVisitor::assign, read back during checking).
159 locals: RefCell<NodeMap<Ty<'tcx>>>,
// Accumulates trait obligations; drained when obligations are selected.
161 fulfillment_cx: RefCell<traits::FulfillmentContext<'tcx>>,
163 // When we process a call like `c()` where `c` is a closure type,
164 // we may not have decided yet whether `c` is a `Fn`, `FnMut`, or
165 // `FnOnce` closure. In that case, we defer full resolution of the
166 // call until upvar inference can kick in and make the
167 // decision. We keep these deferred resolutions grouped by the
168 // def-id of the closure, so that once we decide, we can easily go
169 // back and process them.
170 deferred_call_resolutions: RefCell<DefIdMap<Vec<DeferredCallResolutionHandler<'gcx, 'tcx>>>>,
// Cast expressions whose checking is deferred until more type
// information is available (see the `cast` module's CastCheck).
172 deferred_cast_checks: RefCell<Vec<cast::CastCheck<'tcx>>>,
174 // Anonymized types found in explicit return types and their
175 // associated fresh inference variable. Writeback resolves these
176 // variables to get the concrete type, which can be used to
177 // deanonymize TyAnon, after typeck is done with all functions.
178 anon_types: RefCell<DefIdMap<Ty<'tcx>>>,
180 // Obligations which will have to be checked at the end of
181 // type-checking, after all functions have been inferred.
182 deferred_obligations: RefCell<Vec<traits::DeferredObligation<'tcx>>>,
185 impl<'a, 'gcx, 'tcx> Deref for Inherited<'a, 'gcx, 'tcx> {
186 type Target = InferCtxt<'a, 'gcx, 'tcx>;
187 fn deref(&self) -> &Self::Target {
192 trait DeferredCallResolution<'gcx, 'tcx> {
193 fn resolve<'a>(&mut self, fcx: &FnCtxt<'a, 'gcx, 'tcx>);
196 type DeferredCallResolutionHandler<'gcx, 'tcx> = Box<DeferredCallResolution<'gcx, 'tcx>+'tcx>;
198 /// When type-checking an expression, we propagate downward
199 /// whatever type hint we are able in the form of an `Expectation`.
200 #[derive(Copy, Clone, Debug)]
201 pub enum Expectation<'tcx> {
202 /// We know nothing about what type this expression should have.
205 /// This expression should have the type given (or some subtype)
206 ExpectHasType(Ty<'tcx>),
208 /// This expression will be cast to the `Ty`
209 ExpectCastableToType(Ty<'tcx>),
211 /// This rvalue expression will be wrapped in `&` or `Box` and coerced
212 /// to `&Ty` or `Box<Ty>`, respectively. `Ty` is `[A]` or `Trait`.
213 ExpectRvalueLikeUnsized(Ty<'tcx>),
216 impl<'a, 'gcx, 'tcx> Expectation<'tcx> {
217 // Disregard "castable to" expectations because they
218 // can lead us astray. Consider for example `if cond
219 // {22} else {c} as u8` -- if we propagate the
220 // "castable to u8" constraint to 22, it will pick the
221 // type 22u8, which is overly constrained (c might not
222 // be a u8). In effect, the problem is that the
223 // "castable to" expectation is not the tightest thing
224 // we can say, so we want to drop it in this case.
225 // The tightest thing we can say is "must unify with
226 // else branch". Note that in the case of a "has type"
227 // constraint, this limitation does not hold.
229 // If the expected type is just a type variable, then don't use
230 // an expected type. Otherwise, we might write parts of the type
231 // when checking the 'then' block which are incompatible with the
// Weakens the expectation where necessary before it is propagated
// into the branches of an `if`/`match` (see the comments above).
233 fn adjust_for_branches(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Expectation<'tcx> {
235 ExpectHasType(ety) => {
// Shallowly resolve so we can tell whether the expectation is
// still just an unconstrained inference variable.
236 let ety = fcx.shallow_resolve(ety);
237 if !ety.is_ty_var() {
// Rvalue-like-unsized hints are weak enough to keep as-is.
243 ExpectRvalueLikeUnsized(ety) => {
244 ExpectRvalueLikeUnsized(ety)
250 /// Provide an expectation for an rvalue expression given an *optional*
251 /// hint, which is not required for type safety (the resulting type might
252 /// be checked higher up, as is the case with `&expr` and `box expr`), but
253 /// is useful in determining the concrete type.
255 /// The primary use case is where the expected type is a fat pointer,
256 /// like `&[isize]`. For example, consider the following statement:
258 /// let x: &[isize] = &[1, 2, 3];
260 /// In this case, the expected type for the `&[1, 2, 3]` expression is
261 /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
262 /// expectation `ExpectHasType([isize])`, that would be too strong --
263 /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
264 /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
265 /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
266 /// which still is useful, because it informs integer literals and the like.
267 /// See the test case `test/run-pass/coerce-expect-unsized.rs` and #20169
268 /// for examples of where this comes up.
269 fn rvalue_hint(fcx: &FnCtxt<'a, 'gcx, 'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> {
// `struct_tail` digs through wrappers to the potentially-unsized tail.
270 match fcx.tcx.struct_tail(ty).sty {
271 ty::TySlice(_) | ty::TyStr | ty::TyDynamic(..) => {
272 ExpectRvalueLikeUnsized(ty)
274 _ => ExpectHasType(ty)
278 // Resolves `expected` by a single level if it is a variable. If
279 // there is no expected type or resolution is not possible (e.g.,
280 // no constraints yet present), just returns `None`.
281 fn resolve(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Expectation<'tcx> {
286 ExpectCastableToType(t) => {
287 ExpectCastableToType(fcx.resolve_type_vars_if_possible(&t))
289 ExpectHasType(t) => {
290 ExpectHasType(fcx.resolve_type_vars_if_possible(&t))
292 ExpectRvalueLikeUnsized(t) => {
293 ExpectRvalueLikeUnsized(fcx.resolve_type_vars_if_possible(&t))
// Extracts the hinted type from any kind of expectation, if present.
298 fn to_option(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
299 match self.resolve(fcx) {
300 NoExpectation => None,
301 ExpectCastableToType(ty) |
303 ExpectRvalueLikeUnsized(ty) => Some(ty),
// Like `to_option`, but only yields a type for the strong
// `ExpectHasType` case; weaker hints are discarded.
307 fn only_has_type(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
308 match self.resolve(fcx) {
309 ExpectHasType(ty) => Some(ty),
// Tracks the current `unsafe` context while walking a function body:
// the innermost unsafe scope's node id, its unsafety level, and the
// nesting depth of compiler-generated push/pop-unsafe blocks.
315 #[derive(Copy, Clone)]
316 pub struct UnsafetyState {
317 pub def: ast::NodeId,
318 pub unsafety: hir::Unsafety,
319 pub unsafe_push_count: u32,
// Initial state when entering a function with the given unsafety.
324 pub fn function(unsafety: hir::Unsafety, def: ast::NodeId) -> UnsafetyState {
325 UnsafetyState { def: def, unsafety: unsafety, unsafe_push_count: 0, from_fn: true }
// Computes the state for a nested block, based on the block's rules.
328 pub fn recurse(&mut self, blk: &hir::Block) -> UnsafetyState {
329 match self.unsafety {
330 // If this unsafe, then if the outer function was already marked as
331 // unsafe we shouldn't attribute the unsafe'ness to the block. This
332 // way the block can be warned about instead of ignoring this
333 // extraneous block (functions are never warned about).
334 hir::Unsafety::Unsafe if self.from_fn => *self,
337 let (unsafety, def, count) = match blk.rules {
// checked_add/checked_sub: a push/pop imbalance is a compiler bug,
// so overflow or underflow here panics rather than wrapping.
338 hir::PushUnsafeBlock(..) =>
339 (unsafety, blk.id, self.unsafe_push_count.checked_add(1).unwrap()),
340 hir::PopUnsafeBlock(..) =>
341 (unsafety, blk.id, self.unsafe_push_count.checked_sub(1).unwrap()),
342 hir::UnsafeBlock(..) =>
343 (hir::Unsafety::Unsafe, blk.id, self.unsafe_push_count),
345 (unsafety, self.def, self.unsafe_push_count),
347 UnsafetyState{ def: def,
349 unsafe_push_count: count,
356 /// Whether a node ever exits normally or not.
357 /// Tracked semi-automatically (through type variables
358 /// marked as diverging), with some manual adjustments
359 /// for control-flow primitives (approximating a CFG).
360 #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
362 /// Potentially unknown, some cases converge,
363 /// others require a CFG to determine them.
366 /// Definitely known to diverge and therefore
367 /// not reach the next sibling or its parent.
370 /// Same as `Always` but with a reachability
371 /// warning already emitted
375 // Convenience impls for combining `Diverges`.
// `a & b` = "diverges only if both diverge" = the lesser of the two
// (relies on the derived `Ord` for `Diverges`).
377 impl ops::BitAnd for Diverges {
379 fn bitand(self, other: Self) -> Self {
380 cmp::min(self, other)
// `a | b` = "diverges if either diverges" = the greater of the two.
384 impl ops::BitOr for Diverges {
386 fn bitor(self, other: Self) -> Self {
387 cmp::max(self, other)
391 impl ops::BitAndAssign for Diverges {
392 fn bitand_assign(&mut self, other: Self) {
393 *self = *self & other;
397 impl ops::BitOrAssign for Diverges {
398 fn bitor_assign(&mut self, other: Self) {
399 *self = *self | other;
// True for `Always` and anything ordered above it (i.e. definite divergence).
404 fn always(self) -> bool {
405 self >= Diverges::Always
// Per-loop checking state; collects the `break` expressions seen so far
// so their types can be unified with the loop's result type.
410 pub struct LoopCtxt<'gcx, 'tcx> {
413 break_exprs: Vec<&'gcx hir::Expr>,
// Stack of loops enclosing the expression currently being checked,
// with a node-id -> stack-index side table for labeled break/continue.
418 pub struct EnclosingLoops<'gcx, 'tcx> {
419 stack: Vec<LoopCtxt<'gcx, 'tcx>>,
420 by_id: NodeMap<usize>,
423 impl<'gcx, 'tcx> EnclosingLoops<'gcx, 'tcx> {
// Looks up a loop by label id if given; otherwise the innermost loop.
424 fn find_loop(&mut self, id: Option<ast::NodeId>) -> Option<&mut LoopCtxt<'gcx, 'tcx>> {
425 if let Some(id) = id {
426 if let Some(ix) = self.by_id.get(&id).cloned() {
427 Some(&mut self.stack[ix])
432 self.stack.last_mut()
// Per-function type-checking context; derefs to the shared `Inherited`
// state (see the `Deref` impl below this struct).
438 pub struct FnCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
// Memoizes AST-type -> `Ty` conversions within this body.
439 ast_ty_to_ty_cache: RefCell<NodeMap<Ty<'tcx>>>,
441 body_id: ast::NodeId,
443 // This flag is set to true if, during the writeback phase, we encounter
444 // a type error in this function.
445 writeback_errors: Cell<bool>,
447 // Number of errors that had been reported when we started
448 // checking this function. On exit, if we find that *more* errors
449 // have been reported, we will skip regionck and other work that
450 // expects the types within the function to be consistent.
451 err_count_on_creation: usize,
// Declared return type; `None` until set up by `check_fn`.
453 ret_ty: Option<Ty<'tcx>>,
// Current `unsafe` context (see `UnsafetyState`).
455 ps: RefCell<UnsafetyState>,
457 /// Whether the last checked node can ever exit.
458 diverges: Cell<Diverges>,
460 /// Whether any child nodes have any type errors.
461 has_errors: Cell<bool>,
// Loops currently enclosing the expression being checked.
463 enclosing_loops: RefCell<EnclosingLoops<'gcx, 'tcx>>,
// Shared state; `Deref` makes its fields reachable directly on `FnCtxt`.
465 inh: &'a Inherited<'a, 'gcx, 'tcx>,
468 impl<'a, 'gcx, 'tcx> Deref for FnCtxt<'a, 'gcx, 'tcx> {
469 type Target = Inherited<'a, 'gcx, 'tcx>;
470 fn deref(&self) -> &Self::Target {
475 /// Helper type of a temporary returned by ccx.inherited(...).
476 /// Necessary because we can't write the following bound:
477 /// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(Inherited<'b, 'gcx, 'tcx>).
478 pub struct InheritedBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
479 ccx: &'a CrateCtxt<'a, 'gcx>,
480 infcx: infer::InferCtxtBuilder<'a, 'gcx, 'tcx>
483 impl<'a, 'gcx, 'tcx> CrateCtxt<'a, 'gcx> {
// Prepares an `InheritedBuilder` for the item `id`: fresh (empty)
// typeck tables plus the item's parameter environment.
484 pub fn inherited(&'a self, id: ast::NodeId)
485 -> InheritedBuilder<'a, 'gcx, 'tcx> {
486 let tables = ty::TypeckTables::empty();
487 let param_env = ParameterEnvironment::for_item(self.tcx, id);
490 infcx: self.tcx.infer_ctxt((tables, param_env), Reveal::NotSpecializable)
495 impl<'a, 'gcx, 'tcx> InheritedBuilder<'a, 'gcx, 'tcx> {
// Enters the inference context and hands a fully-constructed
// `Inherited` to the closure; the context lives only for that call.
496 fn enter<F, R>(&'tcx mut self, f: F) -> R
497 where F: for<'b> FnOnce(Inherited<'b, 'gcx, 'tcx>) -> R
500 self.infcx.enter(|infcx| f(Inherited::new(ccx, infcx)))
504 impl<'a, 'gcx, 'tcx> Inherited<'a, 'gcx, 'tcx> {
// Constructs the shared state with all side tables empty.
505 pub fn new(ccx: &'a CrateCtxt<'a, 'gcx>,
506 infcx: InferCtxt<'a, 'gcx, 'tcx>)
511 fulfillment_cx: RefCell::new(traits::FulfillmentContext::new()),
512 locals: RefCell::new(NodeMap()),
513 deferred_call_resolutions: RefCell::new(DefIdMap()),
514 deferred_cast_checks: RefCell::new(Vec::new()),
515 anon_types: RefCell::new(DefIdMap()),
516 deferred_obligations: RefCell::new(Vec::new()),
// Normalizes associated-type projections in `value`, registering any
// obligations produced into this context's fulfillment context.
520 fn normalize_associated_types_in<T>(&self,
522 body_id: ast::NodeId,
525 where T : TypeFoldable<'tcx>
527 assoc::normalize_associated_types_in(self,
528 &mut self.fulfillment_cx.borrow_mut(),
// First pass: checks item *types* (signatures, array lengths, etc.).
536 struct CheckItemTypesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
// Second pass: checks item *bodies* (function/const expressions).
537 struct CheckItemBodiesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
539 impl<'a, 'tcx> Visitor<'tcx> for CheckItemTypesVisitor<'a, 'tcx> {
540 fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
541 NestedVisitorMap::OnlyBodies(&self.ccx.tcx.hir)
544 fn visit_item(&mut self, i: &'tcx hir::Item) {
545 check_item_type(self.ccx, i);
546 intravisit::walk_item(self, i);
// Array length expressions (e.g. `[T; N]`) must have type `usize`.
549 fn visit_ty(&mut self, t: &'tcx hir::Ty) {
551 hir::TyArray(_, length) => {
552 check_const_with_type(self.ccx, length, self.ccx.tcx.types.usize, length.node_id);
557 intravisit::walk_ty(self, t);
// Repeat counts in `[expr; N]` must likewise have type `usize`.
560 fn visit_expr(&mut self, e: &'tcx hir::Expr) {
562 hir::ExprRepeat(_, count) => {
563 check_const_with_type(self.ccx, count, self.ccx.tcx.types.usize, count.node_id);
568 intravisit::walk_expr(self, e);
// Dispatches body-checking for each kind of item-like: free items,
// trait items, and impl items.
572 impl<'a, 'tcx> ItemLikeVisitor<'tcx> for CheckItemBodiesVisitor<'a, 'tcx> {
573 fn visit_item(&mut self, item: &'tcx hir::Item) {
575 hir::ItemFn(ref decl, .., body_id) => {
576 check_bare_fn(self.ccx, &decl, body_id, item.id, item.span);
582 fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) {
583 match trait_item.node {
// Associated consts with a provided value are checked like consts.
584 hir::TraitItemKind::Const(_, Some(expr)) => {
585 check_const(self.ccx, expr, trait_item.id)
// Only *provided* (defaulted) methods have a body to check here.
587 hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Provided(body_id)) => {
588 check_bare_fn(self.ccx, &sig.decl, body_id, trait_item.id, trait_item.span);
// Required methods, valueless consts, and associated types
// carry no body, so there is nothing to check.
590 hir::TraitItemKind::Method(_, hir::TraitMethod::Required(_)) |
591 hir::TraitItemKind::Const(_, None) |
592 hir::TraitItemKind::Type(..) => {
598 fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem) {
599 match impl_item.node {
600 hir::ImplItemKind::Const(_, expr) => {
601 check_const(self.ccx, expr, impl_item.id)
603 hir::ImplItemKind::Method(ref sig, body_id) => {
604 check_bare_fn(self.ccx, &sig.decl, body_id, impl_item.id, impl_item.span);
606 hir::ImplItemKind::Type(_) => {
607 // Nothing to do here.
// Runs the well-formedness checker over every item-like in the crate,
// collecting any errors via `track_errors`.
613 pub fn check_wf_new(ccx: &CrateCtxt) -> CompileResult {
614 ccx.tcx.sess.track_errors(|| {
615 let mut visit = wfcheck::CheckTypeWellFormedVisitor::new(ccx);
616 ccx.tcx.visit_all_item_likes_in_krate(DepNode::WfCheck, &mut visit.as_deep_visitor());
// Entry point for the item-*type* checking pass (signatures, array
// lengths); see `CheckItemTypesVisitor` for what is checked.
620 pub fn check_item_types(ccx: &CrateCtxt) -> CompileResult {
621 ccx.tcx.sess.track_errors(|| {
622 let mut visit = CheckItemTypesVisitor { ccx: ccx };
623 ccx.tcx.visit_all_item_likes_in_krate(DepNode::TypeckItemType,
624 &mut visit.as_deep_visitor());
// Entry point for the item-*body* checking pass. After all bodies are
// checked, re-checks the obligations that individual bodies deferred.
628 pub fn check_item_bodies(ccx: &CrateCtxt) -> CompileResult {
629 ccx.tcx.sess.track_errors(|| {
630 let mut visit = CheckItemBodiesVisitor { ccx: ccx };
631 ccx.tcx.visit_all_item_likes_in_krate(DepNode::TypeckTables, &mut visit);
633 // Process deferred obligations, now that all functions
634 // bodies have been fully inferred.
635 for (&item_id, obligations) in ccx.deferred_obligations.borrow().iter() {
636 // Use the same DepNode as for the body of the original function/item.
637 let def_id = ccx.tcx.hir.local_def_id(item_id);
638 let _task = ccx.tcx.dep_graph.in_task(DepNode::TypeckTables(def_id));
640 let param_env = ParameterEnvironment::for_item(ccx.tcx, item_id);
// A fresh inference context per item: the original one from body
// checking is long gone, so re-register and re-select from scratch.
641 ccx.tcx.infer_ctxt(param_env, Reveal::NotSpecializable).enter(|infcx| {
642 let mut fulfillment_cx = traits::FulfillmentContext::new();
643 for obligation in obligations.iter().map(|o| o.to_obligation()) {
644 fulfillment_cx.register_predicate_obligation(&infcx, obligation);
647 if let Err(errors) = fulfillment_cx.select_all_or_error(&infcx) {
648 infcx.report_fulfillment_errors(&errors);
// Runs dropck over every local `impl Drop` in the crate. If the crate
// has no `Drop` lang item at all, there is nothing to check.
655 pub fn check_drop_impls(ccx: &CrateCtxt) -> CompileResult {
656 ccx.tcx.sess.track_errors(|| {
657 let _task = ccx.tcx.dep_graph.in_task(DepNode::Dropck);
658 let drop_trait = match ccx.tcx.lang_items.drop_trait() {
659 Some(id) => ccx.tcx.lookup_trait_def(id), None => { return }
661 drop_trait.for_each_impl(ccx.tcx, |drop_impl_did| {
662 let _task = ccx.tcx.dep_graph.in_task(DepNode::DropckImpl(drop_impl_did));
// Only impls defined in this crate are checked here.
663 if drop_impl_did.is_local() {
664 match dropck::check_drop_impl(ccx, drop_impl_did) {
// A failed dropck must have reported something already.
667 assert!(ccx.tcx.sess.has_errors());
// Fully type-checks one function body: sets up an inference context,
// checks the body via `check_fn`, then runs the follow-up passes
// (closure analysis, obligation selection, regionck, writeback).
675 fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
676 decl: &'tcx hir::FnDecl,
677 body_id: hir::BodyId,
680 let body = ccx.tcx.hir.body(body_id);
// The function must have a fn-def type; anything else is a compiler bug.
682 let raw_fty = ccx.tcx.item_type(ccx.tcx.hir.local_def_id(fn_id));
683 let fn_ty = match raw_fty.sty {
684 ty::TyFnDef(.., f) => f,
685 _ => span_bug!(body.value.span, "check_bare_fn: function type expected")
688 check_abi(ccx, span, fn_ty.abi);
690 ccx.inherited(fn_id).enter(|inh| {
691 // Compute the fty from point of view of inside fn.
692 let fn_scope = inh.tcx.region_maps.call_site_extent(fn_id, body_id.node_id);
694 fn_ty.sig.subst(inh.tcx, &inh.parameter_environment.free_substs);
// Replace late-bound regions with free regions scoped to this body.
696 inh.tcx.liberate_late_bound_regions(fn_scope, &fn_sig);
698 inh.normalize_associated_types_in(body.value.span, body_id.node_id, &fn_sig);
700 let fcx = check_fn(&inh, fn_ty.unsafety, fn_id, &fn_sig, decl, fn_id, body);
// Post-body passes; their relative order matters (e.g. closure
// analysis before final obligation selection, regionck before
// writeback in `resolve_type_vars_in_body`).
702 fcx.select_all_obligations_and_apply_defaults();
703 fcx.closure_analyze(body);
704 fcx.select_obligations_where_possible();
706 fcx.select_all_obligations_or_error(); // Casts can introduce new obligations.
708 fcx.regionck_fn(fn_id, body);
709 fcx.resolve_type_vars_in_body(body);
// Emits E0570 if the declared ABI is not supported on the target
// the session is compiling for.
713 fn check_abi<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, span: Span, abi: Abi) {
714 if !ccx.tcx.sess.target.target.is_abi_supported(abi) {
715 struct_span_err!(ccx.tcx.sess, span, E0570,
716 "The ABI `{}` is not supported for the current target", abi).emit()
// The "gather" phase: walks a body and records a type (declared or a
// fresh inference variable) for every local and pattern binding.
720 struct GatherLocalsVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
721 fcx: &'a FnCtxt<'a, 'gcx, 'tcx>
724 impl<'a, 'gcx, 'tcx> GatherLocalsVisitor<'a, 'gcx, 'tcx> {
// Records a type for node `nid` in `fcx.locals` and returns it:
// the given type if `ty_opt` is `Some`, otherwise a fresh variable.
725 fn assign(&mut self, span: Span, nid: ast::NodeId, ty_opt: Option<Ty<'tcx>>) -> Ty<'tcx> {
728 // infer the variable's type
729 let var_ty = self.fcx.next_ty_var(TypeVariableOrigin::TypeInference(span));
730 self.fcx.locals.borrow_mut().insert(nid, var_ty);
734 // take type that the user specified
735 self.fcx.locals.borrow_mut().insert(nid, typ);
742 impl<'a, 'gcx, 'tcx> Visitor<'gcx> for GatherLocalsVisitor<'a, 'gcx, 'tcx> {
// No nested-body traversal: each body gathers its own locals.
743 fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'gcx> {
744 NestedVisitorMap::None
747 // Add explicitly-declared locals.
748 fn visit_local(&mut self, local: &'gcx hir::Local) {
// Convert the user-written type annotation, if there is one.
749 let o_ty = match local.ty {
750 Some(ref ty) => Some(self.fcx.to_ty(&ty)),
753 self.assign(local.span, local.id, o_ty);
754 debug!("Local variable {:?} is assigned type {}",
756 self.fcx.ty_to_string(
757 self.fcx.locals.borrow().get(&local.id).unwrap().clone()));
758 intravisit::walk_local(self, local);
761 // Add pattern bindings.
762 fn visit_pat(&mut self, p: &'gcx hir::Pat) {
763 if let PatKind::Binding(_, _, ref path1, _) = p.node {
// Bindings always start as an inference variable...
764 let var_ty = self.assign(p.span, p.id, None);
// ...but must end up with a `Sized` type.
766 self.fcx.require_type_is_sized(var_ty, p.span,
767 traits::VariableType(p.id));
769 debug!("Pattern binding {} is assigned to {} with type {:?}",
771 self.fcx.ty_to_string(
772 self.fcx.locals.borrow().get(&p.id).unwrap().clone()),
775 intravisit::walk_pat(self, p);
778 // Don't descend into the bodies of nested closures
779 fn visit_fn(&mut self, _: intravisit::FnKind<'gcx>, _: &'gcx hir::FnDecl,
780 _: hir::BodyId, _: Span, _: ast::NodeId) { }
783 /// Helper used by check_bare_fn and check_expr_fn. Does the grungy work of checking a function
784 /// body and returns the function context used for that purpose, since in the case of a fn item
785 /// there is still a bit more to do.
788 /// * inherited: other fields inherited from the enclosing fn (if any)
789 fn check_fn<'a, 'gcx, 'tcx>(inherited: &'a Inherited<'a, 'gcx, 'tcx>,
790 unsafety: hir::Unsafety,
791 unsafety_id: ast::NodeId,
792 fn_sig: &ty::FnSig<'tcx>,
793 decl: &'gcx hir::FnDecl,
795 body: &'gcx hir::Body)
796 -> FnCtxt<'a, 'gcx, 'tcx>
798 let mut fn_sig = fn_sig.clone();
800 debug!("check_fn(sig={:?}, fn_id={})", fn_sig, fn_id);
802 // Create the function context. This is either derived from scratch or,
803 // in the case of function expressions, based on the outer context.
804 let mut fcx = FnCtxt::new(inherited, None, body.value.id);
805 let ret_ty = fn_sig.output();
806 *fcx.ps.borrow_mut() = UnsafetyState::function(unsafety, unsafety_id);
// Return types must be `Sized`, and any `impl Trait` (TyAnon) in them
// is replaced with a fresh inference variable before checking the body.
808 fcx.require_type_is_sized(ret_ty, decl.output.span(), traits::ReturnType);
809 fcx.ret_ty = fcx.instantiate_anon_types(&Some(ret_ty));
810 fn_sig = fcx.tcx.mk_fn_sig(fn_sig.inputs().iter().cloned(), &fcx.ret_ty.unwrap(),
// Gather phase: assign (inferred or declared) types to all locals.
813 GatherLocalsVisitor { fcx: &fcx, }.visit_body(body);
815 // Add formal parameters.
816 for (arg_ty, arg) in fn_sig.inputs().iter().zip(&body.arguments) {
817 // The type of the argument must be well-formed.
819 // NB -- this is now checked in wfcheck, but that
820 // currently only results in warnings, so we issue an
821 // old-style WF obligation here so that we still get the
822 // errors that we used to get.
823 fcx.register_old_wf_obligation(arg_ty, arg.pat.span, traits::MiscObligation);
825 // Check the pattern.
826 fcx.check_pat_arg(&arg.pat, arg_ty, true);
827 fcx.write_ty(arg.id, arg_ty);
// Record the liberated signature for later passes (e.g. regionck).
830 inherited.tables.borrow_mut().liberated_fn_sigs.insert(fn_id, fn_sig);
// Main phase: the whole body must coerce to the declared return type.
832 fcx.check_expr_coercable_to_type(&body.value, fcx.ret_ty.unwrap());
// Checks a struct definition: representability (no infinitely-sized
// recursion) and, for `#[repr(simd)]` structs, SIMD validity.
837 fn check_struct(ccx: &CrateCtxt, id: ast::NodeId, span: Span) {
838 let def_id = ccx.tcx.hir.local_def_id(id);
839 check_representable(ccx.tcx, span, def_id);
841 if ccx.tcx.lookup_simd(def_id) {
842 check_simd(ccx.tcx, span, def_id);
// Checks a union definition; only representability applies here.
846 fn check_union(ccx: &CrateCtxt, id: ast::NodeId, span: Span) {
847 check_representable(ccx.tcx, span, ccx.tcx.hir.local_def_id(id));
// Dispatches the per-item-kind *type* checks (as opposed to body
// checks, which happen in `check_item_bodies`).
850 pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
851 debug!("check_item_type(it.id={}, it.name={})",
853 ccx.tcx.item_path_str(ccx.tcx.hir.local_def_id(it.id)));
854 let _indenter = indenter();
856 // Consts can play a role in type-checking, so they are included here.
857 hir::ItemStatic(.., e) |
858 hir::ItemConst(_, e) => check_const(ccx, e, it.id),
859 hir::ItemEnum(ref enum_definition, _) => {
860 check_enum_variants(ccx,
862 &enum_definition.variants,
865 hir::ItemFn(..) => {} // entirely within check_item_body
866 hir::ItemImpl(.., ref impl_item_refs) => {
867 debug!("ItemImpl {} with id {}", it.name, it.id);
868 let impl_def_id = ccx.tcx.hir.local_def_id(it.id);
// Trait impls only: check the items against the trait definition
// and validate any `#[rustc_on_unimplemented]` on the trait.
869 if let Some(impl_trait_ref) = ccx.tcx.impl_trait_ref(impl_def_id) {
870 check_impl_items_against_trait(ccx,
875 let trait_def_id = impl_trait_ref.def_id;
876 check_on_unimplemented(ccx, trait_def_id, it);
879 hir::ItemTrait(..) => {
880 let def_id = ccx.tcx.hir.local_def_id(it.id);
881 check_on_unimplemented(ccx, def_id, it);
883 hir::ItemStruct(..) => {
884 check_struct(ccx, it.id, it.span);
886 hir::ItemUnion(..) => {
887 check_union(ccx, it.id, it.span);
// Type aliases: warn/err on declared generics the alias never uses.
889 hir::ItemTy(_, ref generics) => {
890 let def_id = ccx.tcx.hir.local_def_id(it.id);
891 let pty_ty = ccx.tcx.item_type(def_id);
892 check_bounds_are_used(ccx, generics, pty_ty);
894 hir::ItemForeignMod(ref m) => {
895 check_abi(ccx, it.span, m.abi);
// Intrinsics get signature checks against the compiler's own list;
// ordinary foreign items must be monomorphic (E0044).
897 if m.abi == Abi::RustIntrinsic {
898 for item in &m.items {
899 intrinsic::check_intrinsic_type(ccx, item);
901 } else if m.abi == Abi::PlatformIntrinsic {
902 for item in &m.items {
903 intrinsic::check_platform_intrinsic_type(ccx, item);
906 for item in &m.items {
907 let generics = ccx.tcx.item_generics(ccx.tcx.hir.local_def_id(item.id));
908 if !generics.types.is_empty() {
909 let mut err = struct_span_err!(ccx.tcx.sess, item.span, E0044,
910 "foreign items may not have type parameters");
911 span_help!(&mut err, item.span,
912 "consider using specialization instead of \
// Variadic foreign fns are only permitted with the C ABI.
917 if let hir::ForeignItemFn(ref fn_decl, _, _) = item.node {
918 require_c_abi_if_variadic(ccx.tcx, fn_decl, m.abi, item.span);
923 _ => {/* nothing to do */ }
// Validates a `#[rustc_on_unimplemented = "..."]` attribute on a trait:
// every `{...}` substitution in the message must be `{Self}` or the name
// of one of the trait's type parameters (E0230/E0231), and the attribute
// must carry a string value at all (E0232).
927 fn check_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
930 let generics = ccx.tcx.item_generics(def_id);
931 if let Some(ref attr) = item.attrs.iter().find(|a| {
932 a.check_name("rustc_on_unimplemented")
934 if let Some(istring) = attr.value_str() {
935 let istring = istring.as_str();
// Reuse the format-string parser to walk the message's pieces.
936 let parser = Parser::new(&istring);
937 let types = &generics.types;
938 for token in parser {
940 Piece::String(_) => (), // Normal string, no need to check it
941 Piece::NextArgument(a) => match a.position {
942 // `{Self}` is allowed
943 Position::ArgumentNamed(s) if s == "Self" => (),
944 // So is `{A}` if A is a type parameter
945 Position::ArgumentNamed(s) => match types.iter().find(|t| {
950 let name = ccx.tcx.item_name(def_id);
951 span_err!(ccx.tcx.sess, attr.span, E0230,
952 "there is no type parameter \
957 // `{:1}` and `{}` are not to be used
958 Position::ArgumentIs(_) => {
959 span_err!(ccx.tcx.sess, attr.span, E0231,
960 "only named substitution \
961 parameters are allowed");
// The attribute had no `= "..."` value: E0232.
968 ccx.tcx.sess, attr.span, E0232,
969 "this attribute must have a value")
970 .span_label(attr.span, &format!("attribute requires a value"))
971 .note(&format!("eg `#[rustc_on_unimplemented = \"foo\"]`"))
// Emits E0520 when an impl item overrides a parent-impl item that is
// not marked `default`; points at the parent impl when it is local,
// or names its crate when it is not.
977 fn report_forbidden_specialization<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
978 impl_item: &hir::ImplItem,
981 let mut err = struct_span_err!(
982 tcx.sess, impl_item.span, E0520,
983 "`{}` specializes an item from a parent `impl`, but \
984 that item is not marked `default`",
986 err.span_label(impl_item.span, &format!("cannot specialize default item `{}`",
// Spans are only available for impls in the local crate.
989 match tcx.span_of_impl(parent_impl) {
991 err.span_label(span, &"parent `impl` is here");
992 err.note(&format!("to specialize, `{}` in the parent `impl` must be marked `default`",
996 err.note(&format!("parent implementation is in crate `{}`", cname));
// Checks that an impl item is allowed to specialize: walks the trait's
// specialization ancestor chain for an item with the same name/kind and,
// if the nearest ancestor definition is `final` (not `default`), reports
// the forbidden specialization (E0520).
1003 fn check_specialization_validity<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
1004 trait_def: &ty::TraitDef,
1006 impl_item: &hir::ImplItem)
1008 let ancestors = trait_def.ancestors(impl_id);
// Map the HIR item kind to the associated-item kind used for lookup.
1010 let kind = match impl_item.node {
1011 hir::ImplItemKind::Const(..) => ty::AssociatedKind::Const,
1012 hir::ImplItemKind::Method(..) => ty::AssociatedKind::Method,
1013 hir::ImplItemKind::Type(_) => ty::AssociatedKind::Type
// Element 0 is this impl's own definition; `nth(1)` is the nearest
// ancestor definition, whose defaultness decides if we may override it.
1015 let parent = ancestors.defs(tcx, impl_item.name, kind).nth(1)
1016 .map(|node_item| node_item.map(|parent| parent.defaultness));
1018 if let Some(parent) = parent {
1019 if parent.item.is_final() {
1020 report_forbidden_specialization(tcx, impl_item, parent.node.def_id());
// Checks every item of a trait impl against the trait definition:
// (1) each impl item must correspond to a trait item of the same kind
//     (const/method/type), otherwise E0323/E0324/E0325 is reported;
// (2) methods and consts are compared against the trait signature;
// (3) specialization validity is checked for each item;
// (4) trait items with no implementation and no default yield E0046;
// (5) overriding an associated type invalidates defaulted siblings (E0399).
1026 fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
1029 impl_trait_ref: ty::TraitRef<'tcx>,
1030 impl_item_refs: &[hir::ImplItemRef]) {
1031 // If the trait reference itself is erroneous (so the compilation is going
1032 // to fail), skip checking the items here -- the `impl_item` table in `tcx`
1033 // isn't populated for such impls.
1034 if impl_trait_ref.references_error() { return; }
1036 // Locate trait definition and items
1038 let trait_def = tcx.lookup_trait_def(impl_trait_ref.def_id);
1039 let mut overridden_associated_type = None;
// Closure so the impl-item iterator can be re-created for each pass.
1041 let impl_items = || impl_item_refs.iter().map(|iiref| ccx.tcx.hir.impl_item(iiref.id));
1043 // Check existing impl methods to see if they are both present in trait
1044 // and compatible with trait signature
1045 for impl_item in impl_items() {
1046 let ty_impl_item = tcx.associated_item(tcx.hir.local_def_id(impl_item.id));
// Find the trait item with the same name as this impl item, if any.
1047 let ty_trait_item = tcx.associated_items(impl_trait_ref.def_id)
1048 .find(|ac| ac.name == ty_impl_item.name);
1050 // Check that impl definition matches trait definition
1051 if let Some(ty_trait_item) = ty_trait_item {
1052 match impl_item.node {
1053 hir::ImplItemKind::Const(..) => {
1054 // Find associated const definition.
1055 if ty_trait_item.kind == ty::AssociatedKind::Const {
1056 compare_const_impl(ccx,
// Kind mismatch: the trait declares this name as something other
// than a const.
1062 let mut err = struct_span_err!(tcx.sess, impl_item.span, E0323,
1063 "item `{}` is an associated const, \
1064 which doesn't match its trait `{}`",
1067 err.span_label(impl_item.span, &format!("does not match trait"));
1068 // We can only get the spans from local trait definition
1069 // Same for E0324 and E0325
1070 if let Some(trait_span) = tcx.hir.span_if_local(ty_trait_item.def_id) {
1071 err.span_label(trait_span, &format!("item in trait"));
1076 hir::ImplItemKind::Method(_, body_id) => {
1077 let trait_span = tcx.hir.span_if_local(ty_trait_item.def_id);
1078 if ty_trait_item.kind == ty::AssociatedKind::Method {
// Two-pass comparison: the err_count snapshot detects whether the
// first ("old-broken-mode") run reported anything.
1079 let err_count = tcx.sess.err_count();
1080 compare_impl_method(ccx,
1087 true); // start with old-broken-mode
1088 if err_count == tcx.sess.err_count() {
1089 // old broken mode did not report an error. Try with the new mode.
1090 compare_impl_method(ccx,
1097 false); // use the new mode
1100 let mut err = struct_span_err!(tcx.sess, impl_item.span, E0324,
1101 "item `{}` is an associated method, \
1102 which doesn't match its trait `{}`",
1105 err.span_label(impl_item.span, &format!("does not match trait"));
1106 if let Some(trait_span) = tcx.hir.span_if_local(ty_trait_item.def_id) {
1107 err.span_label(trait_span, &format!("item in trait"));
1112 hir::ImplItemKind::Type(_) => {
1113 if ty_trait_item.kind == ty::AssociatedKind::Type {
// Remember that a defaulted associated type was overridden; used
// below to invalidate defaulted items that relied on it (E0399).
1114 if ty_trait_item.defaultness.has_value() {
1115 overridden_associated_type = Some(impl_item);
1118 let mut err = struct_span_err!(tcx.sess, impl_item.span, E0325,
1119 "item `{}` is an associated type, \
1120 which doesn't match its trait `{}`",
1123 err.span_label(impl_item.span, &format!("does not match trait"));
1124 if let Some(trait_span) = tcx.hir.span_if_local(ty_trait_item.def_id) {
1125 err.span_label(trait_span, &format!("item in trait"));
// Every impl item is also checked against specialization rules.
1133 check_specialization_validity(tcx, trait_def, impl_id, impl_item);
1136 // Check for missing items from trait
1137 let mut missing_items = Vec::new();
1138 let mut invalidated_items = Vec::new();
1139 let associated_type_overridden = overridden_associated_type.is_some();
1140 for trait_item in tcx.associated_items(impl_trait_ref.def_id) {
// An item counts as implemented if some impl in the specialization
// chain (not the trait itself) provides a definition for it.
1141 let is_implemented = trait_def.ancestors(impl_id)
1142 .defs(tcx, trait_item.name, trait_item.kind)
1144 .map(|node_item| !node_item.node.is_from_trait())
1147 if !is_implemented {
1148 if !trait_item.defaultness.has_value() {
1149 missing_items.push(trait_item);
1150 } else if associated_type_overridden {
1151 invalidated_items.push(trait_item.name);
// Renders a short, human-readable signature of a trait item for the
// E0046 note below.
1156 let signature = |item: &ty::AssociatedItem| {
1158 ty::AssociatedKind::Method => {
1159 format!("{}", tcx.item_type(item.def_id).fn_sig().0)
1161 ty::AssociatedKind::Type => format!("type {};", item.name.to_string()),
1162 ty::AssociatedKind::Const => {
1163 format!("const {}: {:?};", item.name.to_string(), tcx.item_type(item.def_id))
1168 if !missing_items.is_empty() {
1169 let mut err = struct_span_err!(tcx.sess, impl_span, E0046,
1170 "not all trait items implemented, missing: `{}`",
1171 missing_items.iter()
1172 .map(|trait_item| trait_item.name.to_string())
1173 .collect::<Vec<_>>().join("`, `"));
1174 err.span_label(impl_span, &format!("missing `{}` in implementation",
1175 missing_items.iter()
1176 .map(|trait_item| trait_item.name.to_string())
1177 .collect::<Vec<_>>().join("`, `")));
1178 for trait_item in missing_items {
// Point at the trait's own declaration when it is in the local crate;
// otherwise fall back to a note with the rendered signature.
1179 if let Some(span) = tcx.hir.span_if_local(trait_item.def_id) {
1180 err.span_label(span, &format!("`{}` from trait", trait_item.name));
1182 err.note(&format!("`{}` from trait: `{}`",
1184 signature(&trait_item)));
1190 if !invalidated_items.is_empty() {
1191 let invalidator = overridden_associated_type.unwrap();
1192 span_err!(tcx.sess, invalidator.span, E0399,
1193 "the following trait items need to be reimplemented \
1194 as `{}` was overridden: `{}`",
1196 invalidated_items.iter()
1197 .map(|name| name.to_string())
1198 .collect::<Vec<_>>().join("`, `"))
1202 /// Checks a constant with a given type.
1203 fn check_const_with_type<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
1205 expected_type: Ty<'tcx>,
// Runs the full check pipeline on the const body: build a fresh FnCtxt,
// coerce the body expression to the declared type, resolve obligations,
// then regionck and write back the inferred types.
1207 let body = ccx.tcx.hir.body(body);
1208 ccx.inherited(id).enter(|inh| {
1209 let fcx = FnCtxt::new(&inh, None, body.value.id);
// Constants must have a statically known size.
1210 fcx.require_type_is_sized(expected_type, body.value.span, traits::ConstSized);
1212 // Gather locals in statics (because of block expressions).
1213 // This is technically unnecessary because locals in static items are forbidden,
1214 // but prevents type checking from blowing up before const checking can properly
1216 GatherLocalsVisitor { fcx: &fcx }.visit_body(body);
1218 fcx.check_expr_coercable_to_type(&body.value, expected_type);
// Obligation selection and fallback, then closure upvar analysis, then
// final error-or-success resolution of remaining obligations.
1220 fcx.select_all_obligations_and_apply_defaults();
1221 fcx.closure_analyze(body);
1222 fcx.select_obligations_where_possible();
1224 fcx.select_all_obligations_or_error();
1226 fcx.regionck_expr(body);
1227 fcx.resolve_type_vars_in_body(body);
// Checks a const item: looks up its declared type and defers to
// `check_const_with_type` for the body check.
1231 fn check_const<'a, 'tcx>(ccx: &CrateCtxt<'a,'tcx>,
1234 let decl_ty = ccx.tcx.item_type(ccx.tcx.hir.local_def_id(id));
1235 check_const_with_type(ccx, body, decl_ty, id);
1238 /// Checks whether a type can be represented in memory. In particular, it
1239 /// identifies types that contain themselves without indirection through a
1240 /// pointer, which would mean their size is unbounded.
1241 fn check_representable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
1245 let rty = tcx.item_type(item_def_id);
1247 // Check that it is possible to represent this type. This call identifies
1248 // (1) types that contain themselves and (2) types that contain a different
1249 // recursive type. It is only necessary to throw an error on those that
1250 // contain themselves. For case 2, there must be an inner type that will be
1251 // caught by case 1.
1252 match rty.is_representable(tcx, sp) {
1253 Representability::SelfRecursive => {
// Emit the "recursive type has infinite size" diagnostic.
1254 tcx.recursive_type_with_infinite_size_error(item_def_id).emit();
1257 Representability::Representable | Representability::ContainsRecursive => (),
// Validates a `#[repr(simd)]` struct: it must be a non-empty struct
// (E0075), all fields must have the same type (E0076), and the element
// type must be a machine type or a bare type parameter (E0077).
1262 pub fn check_simd<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span, def_id: DefId) {
1263 let t = tcx.item_type(def_id);
1265 ty::TyAdt(def, substs) if def.is_struct() => {
1266 let fields = &def.struct_variant().fields;
1267 if fields.is_empty() {
1268 span_err!(tcx.sess, sp, E0075, "SIMD vector cannot be empty");
// Homogeneity check: every field type must equal the first field's type.
1271 let e = fields[0].ty(tcx, substs);
1272 if !fields.iter().all(|f| f.ty(tcx, substs) == e) {
1273 struct_span_err!(tcx.sess, sp, E0076, "SIMD vector should be homogeneous")
1274 .span_label(sp, &format!("SIMD elements must have the same type"))
1279 ty::TyParam(_) => { /* struct<T>(T, T, T, T) is ok */ }
1280 _ if e.is_machine() => { /* struct(u8, u8, u8, u8) is ok */ }
1282 span_err!(tcx.sess, sp, E0077,
1283 "SIMD vector element type should be machine type");
// Checks an enum definition: validates the repr hint (E0084 for repr on a
// zero-variant enum, feature gate for 128-bit reprs), type-checks each
// explicit discriminant expression against the repr type, rejects
// duplicate discriminant values (E0081), and finally checks the enum for
// representability.
1292 #[allow(trivial_numeric_casts)]
1293 pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
1295 vs: &'tcx [hir::Variant],
1297 let def_id = ccx.tcx.hir.local_def_id(id);
// Only the first repr hint is considered; absent hints default to ReprAny.
1298 let hint = *ccx.tcx.lookup_repr_hints(def_id).get(0).unwrap_or(&attr::ReprAny);
1300 if hint != attr::ReprAny && vs.is_empty() {
1302 ccx.tcx.sess, sp, E0084,
1303 "unsupported representation for zero-variant enum")
1304 .span_label(sp, &format!("unsupported enum representation"))
1308 let repr_type_ty = ccx.tcx.enum_repr_type(Some(&hint)).to_ty(ccx.tcx);
// 128-bit discriminants are gated behind the `i128_type` feature.
1309 if repr_type_ty == ccx.tcx.types.i128 || repr_type_ty == ccx.tcx.types.u128 {
1310 if !ccx.tcx.sess.features.borrow().i128_type {
1311 emit_feature_err(&ccx.tcx.sess.parse_sess,
1312 "i128_type", sp, GateIssue::Language, "128-bit type is unstable");
// Explicit discriminant expressions are type-checked as constants of the
// repr type.
1317 if let Some(e) = v.node.disr_expr {
1318 check_const_with_type(ccx, e, repr_type_ty, e.node_id);
1322 let def_id = ccx.tcx.hir.local_def_id(id);
1324 let variants = &ccx.tcx.lookup_adt_def(def_id).variants;
1325 let mut disr_vals: Vec<ty::Disr> = Vec::new();
1326 for (v, variant) in vs.iter().zip(variants.iter()) {
1327 let current_disr_val = variant.disr_val;
1329 // Check for duplicate discriminant values
1330 if let Some(i) = disr_vals.iter().position(|&x| x == current_disr_val) {
// Recover the span of the earlier variant that first used this value,
// preferring its explicit discriminant expression when present.
1331 let variant_i_node_id = ccx.tcx.hir.as_local_node_id(variants[i].did).unwrap();
1332 let variant_i = ccx.tcx.hir.expect_variant(variant_i_node_id);
1333 let i_span = match variant_i.node.disr_expr {
1334 Some(expr) => ccx.tcx.hir.span(expr.node_id),
1335 None => ccx.tcx.hir.span(variant_i_node_id)
1337 let span = match v.node.disr_expr {
1338 Some(expr) => ccx.tcx.hir.span(expr.node_id),
1341 struct_span_err!(ccx.tcx.sess, span, E0081,
1342 "discriminant value `{}` already exists", disr_vals[i])
1343 .span_label(i_span, &format!("first use of `{}`", disr_vals[i]))
1344 .span_label(span , &format!("enum already has `{}`", disr_vals[i]))
1347 disr_vals.push(current_disr_val);
1350 check_representable(ccx.tcx, sp, def_id);
// `AstConv` implementation for `FnCtxt`: inside a function body, AST->ty
// conversion can consult the in-progress inference context, so omitted
// types/regions become fresh inference variables rather than errors.
1353 impl<'a, 'gcx, 'tcx> AstConv<'gcx, 'tcx> for FnCtxt<'a, 'gcx, 'tcx> {
1354 fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
1356 fn ast_ty_to_ty_cache(&self) -> &RefCell<NodeMap<Ty<'tcx>>> {
1357 &self.ast_ty_to_ty_cache
// Lookups below are infallible here: item data is already collected.
1360 fn get_generics(&self, _: Span, id: DefId)
1361 -> Result<&'tcx ty::Generics<'tcx>, ErrorReported>
1363 Ok(self.tcx().item_generics(id))
1366 fn get_item_type(&self, _: Span, id: DefId) -> Result<Ty<'tcx>, ErrorReported>
1368 Ok(self.tcx().item_type(id))
1371 fn get_trait_def(&self, _: Span, id: DefId)
1372 -> Result<&'tcx ty::TraitDef, ErrorReported>
1374 Ok(self.tcx().lookup_trait_def(id))
1377 fn ensure_super_predicates(&self, _: Span, _: DefId) -> Result<(), ErrorReported> {
1378 // all super predicates are ensured during collect pass
1382 fn get_free_substs(&self) -> Option<&Substs<'tcx>> {
1383 Some(&self.parameter_environment.free_substs)
// Bounds for a type parameter come from the parameter environment's
// caller bounds, filtered to trait predicates whose self type is the
// parameter in question.
1386 fn get_type_parameter_bounds(&self,
1388 node_id: ast::NodeId)
1389 -> Result<Vec<ty::PolyTraitRef<'tcx>>, ErrorReported>
1391 let def = self.tcx.type_parameter_def(node_id);
1392 let r = self.parameter_environment
1395 .filter_map(|predicate| {
1397 ty::Predicate::Trait(ref data) => {
1398 if data.0.self_ty().is_param(def.index) {
1399 Some(data.to_poly_trait_ref())
// An elided type in a body becomes a fresh type inference variable.
1413 fn ty_infer(&self, span: Span) -> Ty<'tcx> {
1414 self.next_ty_var(TypeVariableOrigin::TypeInference(span))
1417 fn ty_infer_for_def(&self,
1418 ty_param_def: &ty::TypeParameterDef<'tcx>,
1419 substs: &[Kind<'tcx>],
1420 span: Span) -> Ty<'tcx> {
1421 self.type_var_for_def(span, ty_param_def, substs)
// Late-bound regions in the poly trait ref are replaced with fresh
// region variables before projecting the associated type.
1424 fn projected_ty_from_poly_trait_ref(&self,
1426 poly_trait_ref: ty::PolyTraitRef<'tcx>,
1427 item_name: ast::Name)
1430 let (trait_ref, _) =
1431 self.replace_late_bound_regions_with_fresh_var(
1433 infer::LateBoundRegionConversionTime::AssocTypeProjection(item_name),
1436 self.normalize_associated_type(span, trait_ref, item_name)
1439 fn projected_ty(&self,
1441 trait_ref: ty::TraitRef<'tcx>,
1442 item_name: ast::Name)
1445 self.normalize_associated_type(span, trait_ref, item_name)
1448 fn set_tainted_by_errors(&self) {
1449 self.infcx.set_tainted_by_errors()
// `RegionScope` implementation for `FnCtxt`: within a body, elided and
// anonymous regions are always fresh inference variables rather than
// fixed defaults.
1453 impl<'a, 'gcx, 'tcx> RegionScope for FnCtxt<'a, 'gcx, 'tcx> {
1454 fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
1455 Some(self.base_object_lifetime_default(span))
1458 fn base_object_lifetime_default(&self, span: Span) -> ty::Region {
1459 // RFC #599 specifies that object lifetime defaults take
1460 // precedence over other defaults. But within a fn body we
1461 // don't have a *default* region, rather we use inference to
1462 // find the *correct* region, which is strictly more general
1463 // (and anyway, within a fn body the right region may not even
1464 // be something the user can write explicitly, since it might
1465 // be some expression).
1466 *self.next_region_var(infer::MiscVariable(span))
1469 fn anon_region(&self, span: Span)
1470 -> Result<ty::Region, Option<Vec<ElisionFailureInfo>>> {
1471 Ok(*self.next_region_var(infer::MiscVariable(span)))
1475 /// Controls whether the arguments are tupled. This is used for the call
1478 /// Tupling means that all call-side arguments are packed into a tuple and
1479 /// passed as a single parameter. For example, if tupling is enabled, this
1482 /// fn f(x: (isize, isize))
1484 /// Can be called as:
1491 #[derive(Clone, Eq, PartialEq)]
1492 enum TupleArgumentsFlag {
1497 impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
// Constructs a fresh `FnCtxt` for checking one body: empty caches, the
// session error count snapshotted at creation (see
// `err_count_since_creation`), and divergence initialized to `Maybe`.
1498 pub fn new(inh: &'a Inherited<'a, 'gcx, 'tcx>,
1499 rty: Option<Ty<'tcx>>,
1500 body_id: ast::NodeId)
1501 -> FnCtxt<'a, 'gcx, 'tcx> {
1503 ast_ty_to_ty_cache: RefCell::new(NodeMap()),
1505 writeback_errors: Cell::new(false),
1506 err_count_on_creation: inh.tcx.sess.err_count(),
// Unsafety state starts as a normal (safe) function at the crate root.
1508 ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal,
1509 ast::CRATE_NODE_ID)),
1510 diverges: Cell::new(Diverges::Maybe),
1511 has_errors: Cell::new(false),
1512 enclosing_loops: RefCell::new(EnclosingLoops {
// Accessor for the parameter environment of the body being checked.
1520 pub fn param_env(&self) -> &ty::ParameterEnvironment<'gcx> {
1521 &self.parameter_environment
// Accessor for the compiler session.
1524 pub fn sess(&self) -> &Session {
// Number of errors reported since this `FnCtxt` was created, computed
// from the error-count snapshot taken in `new`.
1528 pub fn err_count_since_creation(&self) -> usize {
1529 self.tcx.sess.err_count() - self.err_count_on_creation
1532 /// Produce warning on the given node, if the current point in the
1533 /// function is unreachable, and there hasn't been another warning.
1534 fn warn_if_unreachable(&self, id: ast::NodeId, span: Span, kind: &str) {
// Only warn once per divergent region: `Always` flips to `WarnedAlways`
// so subsequent nodes in the same dead region stay quiet.
1535 if self.diverges.get() == Diverges::Always {
1536 self.diverges.set(Diverges::WarnedAlways);
1538 self.tcx.sess.add_lint(lint::builtin::UNREACHABLE_CODE,
1540 format!("unreachable {}", kind));
// Builds an `ObligationCause` anchored at the current body with the given
// span and cause code. (NOTE(review): the `fn` signature line is elided
// in this view; only the tail of the method is visible here.)
1546 code: ObligationCauseCode<'tcx>)
1547 -> ObligationCause<'tcx> {
1548 ObligationCause::new(span, self.body_id, code)
// Convenience: an `ObligationCause` with the generic `MiscObligation` code.
1551 pub fn misc(&self, span: Span) -> ObligationCause<'tcx> {
1552 self.cause(span, ObligationCauseCode::MiscObligation)
1555 /// Resolves type variables in `ty` if possible. Unlike the infcx
1556 /// version (resolve_type_vars_if_possible), this version will
1557 /// also select obligations if it seems useful, in an effort
1558 /// to get more type information.
1559 fn resolve_type_vars_with_obligations(&self, mut ty: Ty<'tcx>) -> Ty<'tcx> {
1560 debug!("resolve_type_vars_with_obligations(ty={:?})", ty);
1562 // No TyInfer()? Nothing needs doing.
1563 if !ty.has_infer_types() {
1564 debug!("resolve_type_vars_with_obligations: ty={:?}", ty);
1568 // If `ty` is a type variable, see whether we already know what it is.
1569 ty = self.resolve_type_vars_if_possible(&ty);
1570 if !ty.has_infer_types() {
1571 debug!("resolve_type_vars_with_obligations: ty={:?}", ty);
1575 // If not, try resolving pending obligations as much as
1576 // possible. This can help substantially when there are
1577 // indirect dependencies that don't seem worth tracking
1579 self.select_obligations_where_possible();
1580 ty = self.resolve_type_vars_if_possible(&ty);
1582 debug!("resolve_type_vars_with_obligations: ty={:?}", ty);
// Queues a call-resolution handler to be run later for the given closure,
// appending to any handlers already recorded for that closure.
1586 fn record_deferred_call_resolution(&self,
1587 closure_def_id: DefId,
1588 r: DeferredCallResolutionHandler<'gcx, 'tcx>) {
1589 let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
1590 deferred_call_resolutions.entry(closure_def_id).or_insert(vec![]).push(r);
// Removes and returns all deferred call-resolution handlers recorded for
// the given closure; returns an empty vec when none were recorded.
1593 fn remove_deferred_call_resolutions(&self,
1594 closure_def_id: DefId)
1595 -> Vec<DeferredCallResolutionHandler<'gcx, 'tcx>>
1597 let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
1598 deferred_call_resolutions.remove(&closure_def_id).unwrap_or(Vec::new())
// Debug tag identifying this `FnCtxt` instance by its address; used only
// in `debug!` output.
1601 pub fn tag(&self) -> String {
1602 let self_ptr: *const FnCtxt = self;
1603 format!("{:?}", self_ptr)
// Looks up the type recorded for a local variable; it is a compiler bug
// (span_bug) for a local to have no entry by the time this is called.
1606 pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> Ty<'tcx> {
1607 match self.locals.borrow().get(&nid) {
1610 span_bug!(span, "no type for local variable {}",
1611 self.tcx.hir.node_to_string(nid));
// Records the type of an AST node in the typeck tables, and updates the
// per-body error and divergence state as side effects.
1617 pub fn write_ty(&self, node_id: ast::NodeId, ty: Ty<'tcx>) {
1618 debug!("write_ty({}, {:?}) in fcx {}",
1619 node_id, ty, self.tag());
1620 self.tables.borrow_mut().node_types.insert(node_id, ty);
// Writing an error type taints the whole body.
1622 if ty.references_error() {
1623 self.has_errors.set(true);
1626 // FIXME(canndrew): This is_never should probably be an is_uninhabited
// A `!`-typed (or diverging-variable) node marks the current point as
// definitely diverging.
1627 if ty.is_never() || self.type_var_diverges(ty) {
1628 self.diverges.set(self.diverges.get() | Diverges::Always);
// Records item substitutions for a node; no-op substitutions are skipped
// to keep the table small.
1632 pub fn write_substs(&self, node_id: ast::NodeId, substs: ty::ItemSubsts<'tcx>) {
1633 if !substs.substs.is_noop() {
1634 debug!("write_substs({}, {:?}) in fcx {}",
1639 self.tables.borrow_mut().item_substs.insert(node_id, substs);
// Convenience wrapper that records an autoderef as a `DerefRef`
// adjustment for the node.
1643 pub fn write_autoderef_adjustment(&self,
1644 node_id: ast::NodeId,
1646 adjusted_ty: Ty<'tcx>) {
1647 self.write_adjustment(node_id, adjustment::Adjustment {
1648 kind: adjustment::Adjust::DerefRef {
// Records an adjustment for a node; identity adjustments are dropped
// rather than stored.
1657 pub fn write_adjustment(&self,
1658 node_id: ast::NodeId,
1659 adj: adjustment::Adjustment<'tcx>) {
1660 debug!("write_adjustment(node_id={}, adj={:?})", node_id, adj);
1662 if adj.is_identity() {
1666 self.tables.borrow_mut().adjustments.insert(node_id, adj);
1669 /// Basically whenever we are converting from a type scheme into
1670 /// the fn body space, we always want to normalize associated
1671 /// types as well. This function combines the two.
1672 fn instantiate_type_scheme<T>(&self,
1674 substs: &Substs<'tcx>,
1677 where T : TypeFoldable<'tcx>
// Substitute first, then normalize associated types in the result.
1679 let value = value.subst(self.tcx, substs);
1680 let result = self.normalize_associated_types_in(span, &value);
1681 debug!("instantiate_type_scheme(value={:?}, substs={:?}) = {:?}",
1688 /// As `instantiate_type_scheme`, but for the bounds found in a
1689 /// generic type scheme.
1690 fn instantiate_bounds(&self, span: Span, def_id: DefId, substs: &Substs<'tcx>)
1691 -> ty::InstantiatedPredicates<'tcx> {
// Instantiate the item's predicates with `substs`, then normalize any
// associated types appearing in them.
1692 let bounds = self.tcx.item_predicates(def_id);
1693 let result = bounds.instantiate(self.tcx, substs);
1694 let result = self.normalize_associated_types_in(span, &result.predicates);
1695 debug!("instantiate_bounds(bounds={:?}, substs={:?}) = {:?}",
1699 ty::InstantiatedPredicates {
1704 /// Replace all anonymized types with fresh inference variables
1705 /// and record them for writeback.
1706 fn instantiate_anon_types<T: TypeFoldable<'tcx>>(&self, value: &T) -> T {
1707 value.fold_with(&mut BottomUpFolder { tcx: self.tcx, fldop: |ty| {
1708 if let ty::TyAnon(def_id, substs) = ty.sty {
1709 // Use the same type variable if the exact same TyAnon appears more
1710 // than once in the return type (e.g. if it's pased to a type alias).
1711 if let Some(ty_var) = self.anon_types.borrow().get(&def_id) {
// First encounter: mint a fresh type variable and remember it so later
// occurrences of the same `TyAnon` reuse it.
1714 let span = self.tcx.def_span(def_id);
1715 let ty_var = self.next_ty_var(TypeVariableOrigin::TypeInference(span));
1716 self.anon_types.borrow_mut().insert(def_id, ty_var);
1718 let item_predicates = self.tcx.item_predicates(def_id);
1719 let bounds = item_predicates.instantiate(self.tcx, substs);
1721 for predicate in bounds.predicates {
1722 // Change the predicate to refer to the type variable,
1723 // which will be the concrete type, instead of the TyAnon.
1724 // This also instantiates nested `impl Trait`.
1725 let predicate = self.instantiate_anon_types(&predicate);
1727 // Require that the predicate holds for the concrete type.
1728 let cause = traits::ObligationCause::new(span, self.body_id,
1729 traits::ReturnType);
1730 self.register_predicate(traits::Obligation::new(cause, predicate));
// Delegates associated-type normalization to the shared `Inherited`
// context, anchored at this body.
1740 fn normalize_associated_types_in<T>(&self, span: Span, value: &T) -> T
1741 where T : TypeFoldable<'tcx>
1743 self.inh.normalize_associated_types_in(span, self.body_id, value)
// Normalizes the projection `<trait_ref>::item_name` via the fulfillment
// context, using a misc obligation cause at `span`.
1746 fn normalize_associated_type(&self,
1748 trait_ref: ty::TraitRef<'tcx>,
1749 item_name: ast::Name)
1752 let cause = traits::ObligationCause::new(span,
1754 traits::ObligationCauseCode::MiscObligation);
1757 .normalize_projection_type(self,
1759 trait_ref: trait_ref,
1760 item_name: item_name,
// Records the unit type `()` for a node.
1765 pub fn write_nil(&self, node_id: ast::NodeId) {
1766 self.write_ty(node_id, self.tcx.mk_nil());
// Records the never type `!` for a node.
1769 pub fn write_never(&self, node_id: ast::NodeId) {
1770 self.write_ty(node_id, self.tcx.types.never);
// Records the error type for a node (used after reporting a diagnostic).
1773 pub fn write_error(&self, node_id: ast::NodeId) {
1774 self.write_ty(node_id, self.tcx.types.err);
// Registers an obligation that the type implements the given trait bound,
// with the supplied cause code for diagnostics.
1777 pub fn require_type_meets(&self,
1780 code: traits::ObligationCauseCode<'tcx>,
1783 self.register_bound(
1786 traits::ObligationCause::new(span, self.body_id, code));
// Registers an obligation that `ty` is `Sized`, resolving the `Sized`
// trait through the lang-item table.
1789 pub fn require_type_is_sized(&self,
1792 code: traits::ObligationCauseCode<'tcx>)
1794 let lang_item = self.tcx.require_lang_item(lang_items::SizedTraitLangItem);
1795 self.require_type_meets(ty, span, code, lang_item);
// Registers a trait bound on `ty` with the fulfillment context.
1798 pub fn register_bound(&self,
1801 cause: traits::ObligationCause<'tcx>)
1803 self.fulfillment_cx.borrow_mut()
1804 .register_bound(self, ty, def_id, cause);
// Registers a single predicate obligation with the fulfillment context.
1807 pub fn register_predicate(&self,
1808 obligation: traits::PredicateObligation<'tcx>)
1810 debug!("register_predicate({:?})",
1814 .register_predicate_obligation(self, obligation);
// Registers a batch of predicate obligations, one at a time.
1817 pub fn register_predicates(&self,
1818 obligations: Vec<traits::PredicateObligation<'tcx>>)
1820 for obligation in obligations {
1821 self.register_predicate(obligation);
// Unpacks an `InferOk`: registers its obligations and returns its value.
1825 pub fn register_infer_ok_obligations<T>(&self, infer_ok: InferOk<'tcx, T>) -> T {
1826 self.register_predicates(infer_ok.obligations);
// Converts an AST type to a `Ty`, registering a well-formedness
// obligation for the result.
1830 pub fn to_ty(&self, ast_t: &hir::Ty) -> Ty<'tcx> {
1831 let t = AstConv::ast_ty_to_ty(self, self, ast_t);
1832 self.register_wf_obligation(t, ast_t.span, traits::MiscObligation);
// Returns the recorded type of a node. A missing entry is tolerated (as
// the error type) only if errors have already been reported since this
// FnCtxt was created; otherwise it is a compiler bug.
1836 pub fn node_ty(&self, id: ast::NodeId) -> Ty<'tcx> {
1837 match self.tables.borrow().node_types.get(&id) {
1839 None if self.err_count_since_creation() != 0 => self.tcx.types.err,
1841 bug!("no type for node {}: {} in fcx {}",
1842 id, self.tcx.hir.node_to_string(id),
// Invokes `f` with the node's item substitutions, if any were recorded.
1848 pub fn opt_node_ty_substs<F>(&self,
1851 F: FnOnce(&ty::ItemSubsts<'tcx>),
1853 if let Some(s) = self.tables.borrow().item_substs.get(&id) {
1858 /// Registers an obligation for checking later, during regionck, that the type `ty` must
1859 /// outlive the region `r`.
1860 pub fn register_region_obligation(&self,
1862 region: &'tcx ty::Region,
1863 cause: traits::ObligationCause<'tcx>)
1865 let mut fulfillment_cx = self.fulfillment_cx.borrow_mut();
1866 fulfillment_cx.register_region_obligation(ty, region, cause);
1869 /// Registers an obligation for checking later, during regionck, that the type `ty` must
1870 /// outlive the region `r`.
1871 pub fn register_wf_obligation(&self,
1874 code: traits::ObligationCauseCode<'tcx>)
1876 // WF obligations never themselves fail, so no real need to give a detailed cause:
1877 let cause = traits::ObligationCause::new(span, self.body_id, code);
1878 self.register_predicate(traits::Obligation::new(cause, ty::Predicate::WellFormed(ty)));
// Legacy WF obligation kept for migration purposes; expressed as a
// region obligation against the empty region.
1881 pub fn register_old_wf_obligation(&self,
1884 code: traits::ObligationCauseCode<'tcx>)
1886 // Registers an "old-style" WF obligation that uses the
1887 // implicator code. This is basically a buggy version of
1888 // `register_wf_obligation` that is being kept around
1889 // temporarily just to help with phasing in the newer rules.
1891 // FIXME(#27579) all uses of this should be migrated to register_wf_obligation eventually
1892 let cause = traits::ObligationCause::new(span, self.body_id, code);
1893 self.register_region_obligation(ty, self.tcx.mk_region(ty::ReEmpty), cause);
1896 /// Registers obligations that all types appearing in `substs` are well-formed.
1897 pub fn add_wf_bounds(&self, substs: &Substs<'tcx>, expr: &hir::Expr)
1899 for ty in substs.types() {
1900 self.register_wf_obligation(ty, expr.span, traits::MiscObligation);
1904 /// Given a fully substituted set of bounds (`generic_bounds`), and the values with which each
1905 /// type/region parameter was instantiated (`substs`), creates and registers suitable
1906 /// trait/region obligations.
1908 /// For example, if there is a function:
1911 /// fn foo<'a,T:'a>(...)
1914 /// and a reference:
1920 /// Then we will create a fresh region variable `'$0` and a fresh type variable `$1` for `'a`
1921 /// and `T`. This routine will add a region obligation `$1:'$0` and register it locally.
1922 pub fn add_obligations_for_parameters(&self,
1923 cause: traits::ObligationCause<'tcx>,
1924 predicates: &ty::InstantiatedPredicates<'tcx>)
// Escaping late-bound regions here would indicate a caller bug.
1926 assert!(!predicates.has_escaping_regions());
1928 debug!("add_obligations_for_parameters(predicates={:?})",
1931 for obligation in traits::predicates_for_generics(cause, predicates) {
1932 self.register_predicate(obligation);
1936 // FIXME(arielb1): use this instead of field.ty everywhere
1937 // Only for fields! Returns <none> for methods>
1938 // Indifferent to privacy flags
// Substituted, normalized type of a struct/enum field.
1939 pub fn field_ty(&self,
1941 field: &'tcx ty::FieldDef,
1942 substs: &Substs<'tcx>)
1945 self.normalize_associated_types_in(span,
1946 &field.ty(self.tcx, substs))
// Runs all deferred cast checks accumulated during expression checking,
// draining the queue in the process.
1949 fn check_casts(&self) {
1950 let mut deferred_cast_checks = self.deferred_cast_checks.borrow_mut();
1951 for cast in deferred_cast_checks.drain(..) {
1956 /// Apply "fallbacks" to some types
1957 /// ! gets replaced with (), unconstrained ints with i32, and unconstrained floats with f64.
1958 fn default_type_parameters(&self) {
1959 use rustc::ty::error::UnconstrainedNumeric::Neither;
1960 use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1962 // Defaulting inference variables becomes very dubious if we have
1963 // encountered type-checking errors. Therefore, if we think we saw
1964 // some errors in this function, just resolve all uninstantiated type
1965 // variables to TyError.
1966 if self.is_tainted_by_errors() {
1967 for ty in &self.unsolved_variables() {
1968 if let ty::TyInfer(_) = self.shallow_resolve(ty).sty {
1969 debug!("default_type_parameters: defaulting `{:?}` to error", ty);
1970 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx().types.err);
1976 for ty in &self.unsolved_variables() {
1977 let resolved = self.resolve_type_vars_if_possible(ty);
// Diverging type variables fall back to the diverging default before
// any numeric fallback is considered.
1978 if self.type_var_diverges(resolved) {
1979 debug!("default_type_parameters: defaulting `{:?}` to `!` because it diverges",
1981 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
1982 self.tcx.mk_diverging_default());
1984 match self.type_is_unconstrained_numeric(resolved) {
1985 UnconstrainedInt => {
1986 debug!("default_type_parameters: defaulting `{:?}` to `i32`",
1988 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
1990 UnconstrainedFloat => {
// Bug fix: the debug message previously said `f32`, but the fallback
// actually applied on the next line is `f64`.
1991 debug!("default_type_parameters: defaulting `{:?}` to `f64`",
1993 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
// Dispatches between the old and new (feature-gated) type-parameter
// fallback algorithms.
2001 fn select_all_obligations_and_apply_defaults(&self) {
2002 if self.tcx.sess.features.borrow().default_type_parameter_fallback {
2003 self.new_select_all_obligations_and_apply_defaults();
2005 self.old_select_all_obligations_and_apply_defaults();
2009 // Implements old type inference fallback algorithm
// Select, apply numeric/diverging defaults, then select again so the
// defaults can unblock further obligations.
2010 fn old_select_all_obligations_and_apply_defaults(&self) {
2011 self.select_obligations_where_possible();
2012 self.default_type_parameters();
2013 self.select_obligations_where_possible();
2016 fn new_select_all_obligations_and_apply_defaults(&self) {
2017 use rustc::ty::error::UnconstrainedNumeric::Neither;
2018 use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
2020 // For the time being this errs on the side of being memory wasteful but provides better
2022 // let type_variables = self.type_variables.clone();
2024 // There is a possibility that this algorithm will have to run an arbitrary number of times
2025 // to terminate so we bound it by the compiler's recursion limit.
2026 for _ in 0..self.tcx.sess.recursion_limit.get() {
2027 // First we try to solve all obligations, it is possible that the last iteration
2028 // has made it possible to make more progress.
2029 self.select_obligations_where_possible();
2031 let mut conflicts = Vec::new();
2033 // Collect all unsolved type, integral and floating point variables.
2034 let unsolved_variables = self.unsolved_variables();
2036 // We must collect the defaults *before* we do any unification. Because we have
2037 // directly attached defaults to the type variables any unification that occurs
2038 // will erase defaults causing conflicting defaults to be completely ignored.
2039 let default_map: FxHashMap<_, _> =
2042 .filter_map(|t| self.default(t).map(|d| (t, d)))
2045 let mut unbound_tyvars = FxHashSet();
2047 debug!("select_all_obligations_and_apply_defaults: defaults={:?}", default_map);
2049 // We loop over the unsolved variables, resolving them and if they are
2050 // and unconstrainted numeric type we add them to the set of unbound
2051 // variables. We do this so we only apply literal fallback to type
2052 // variables without defaults.
2053 for ty in &unsolved_variables {
2054 let resolved = self.resolve_type_vars_if_possible(ty);
2055 if self.type_var_diverges(resolved) {
2056 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
2057 self.tcx.mk_diverging_default());
2059 match self.type_is_unconstrained_numeric(resolved) {
2060 UnconstrainedInt | UnconstrainedFloat => {
2061 unbound_tyvars.insert(resolved);
2068 // We now remove any numeric types that also have defaults, and instead insert
2069 // the type variable with a defined fallback.
2070 for ty in &unsolved_variables {
2071 if let Some(_default) = default_map.get(ty) {
2072 let resolved = self.resolve_type_vars_if_possible(ty);
2074 debug!("select_all_obligations_and_apply_defaults: \
2075 ty: {:?} with default: {:?}",
2078 match resolved.sty {
2079 ty::TyInfer(ty::TyVar(_)) => {
2080 unbound_tyvars.insert(ty);
2083 ty::TyInfer(ty::IntVar(_)) | ty::TyInfer(ty::FloatVar(_)) => {
2084 unbound_tyvars.insert(ty);
2085 if unbound_tyvars.contains(resolved) {
2086 unbound_tyvars.remove(resolved);
2095 // If there are no more fallbacks to apply at this point we have applied all possible
2096 // defaults and type inference will proceed as normal.
2097 if unbound_tyvars.is_empty() {
2101 // Finally we go through each of the unbound type variables and unify them with
2102 // the proper fallback, reporting a conflicting default error if any of the
2103 // unifications fail. We know it must be a conflicting default because the
2104 // variable would only be in `unbound_tyvars` and have a concrete value if
2105 // it had been solved by previously applying a default.
2107 // We wrap this in a transaction for error reporting, if we detect a conflict
2108 // we will rollback the inference context to its prior state so we can probe
2109 // for conflicts and correctly report them.
2112 let _ = self.commit_if_ok(|_: &infer::CombinedSnapshot| {
2113 for ty in &unbound_tyvars {
2114 if self.type_var_diverges(ty) {
2115 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
2116 self.tcx.mk_diverging_default());
2118 match self.type_is_unconstrained_numeric(ty) {
2119 UnconstrainedInt => {
2120 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
2122 UnconstrainedFloat => {
2123 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
2126 if let Some(default) = default_map.get(ty) {
2127 let default = default.clone();
2128 match self.eq_types(false,
2129 &self.misc(default.origin_span),
2132 Ok(ok) => self.register_infer_ok_obligations(ok),
2133 Err(_) => conflicts.push((*ty, default)),
2141 // If there are conflicts we rollback, otherwise commit
2142 if conflicts.len() > 0 {
2149 if conflicts.len() > 0 {
2150 // Loop through each conflicting default, figuring out the default that caused
2151 // a unification failure and then report an error for each.
2152 for (conflict, default) in conflicts {
2153 let conflicting_default =
2154 self.find_conflicting_default(&unbound_tyvars, &default_map, conflict)
2155 .unwrap_or(type_variable::Default {
2156 ty: self.next_ty_var(
2157 TypeVariableOrigin::MiscVariable(syntax_pos::DUMMY_SP)),
2158 origin_span: syntax_pos::DUMMY_SP,
2159 // what do I put here?
2160 def_id: self.tcx.hir.local_def_id(ast::CRATE_NODE_ID)
2163 // This is to ensure that we eliminate any non-determinism from the error
2164 // reporting by fixing an order, it doesn't matter what order we choose
2165 // just that it is consistent.
2166 let (first_default, second_default) =
2167 if default.def_id < conflicting_default.def_id {
2168 (default, conflicting_default)
2170 (conflicting_default, default)
2174 self.report_conflicting_default_types(
2175 first_default.origin_span,
2183 self.select_obligations_where_possible();
2186 // For use in error handling related to default type parameter fallback. We explicitly
2187 // apply the default that caused conflict first to a local version of the type variable
2188 // table then apply defaults until we find a conflict. That default must be the one
2189 // that caused conflict earlier.
// NOTE(review): this extraction has elided lines (e.g. the line between 2192 and 2194
// presumably declaring the `conflict` parameter used below, plus closing braces and the
// `Neither`/`Err` match arms) — verify against the full source before editing.
2190 fn find_conflicting_default(&self,
2191 unbound_vars: &FxHashSet<Ty<'tcx>>,
2192 default_map: &FxHashMap<&Ty<'tcx>, type_variable::Default<'tcx>>,
2194 -> Option<type_variable::Default<'tcx>> {
2195 use rustc::ty::error::UnconstrainedNumeric::Neither;
2196 use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
2198 // Ensure that we apply the conflicting default first
2199 let mut unbound_tyvars = Vec::with_capacity(unbound_vars.len() + 1);
2200 unbound_tyvars.push(conflict);
2201 unbound_tyvars.extend(unbound_vars.iter());
2203 let mut result = None;
2204 // We run the same code as above applying defaults in order, this time when
2205 // we find the conflict we just return it for error reporting above.
2207 // We also run this inside snapshot that never commits so we can do error
2208 // reporting for more than one conflict.
2209 for ty in &unbound_tyvars {
// Diverging type variables fall back to the diverging default (`!`/`()`).
2210 if self.type_var_diverges(ty) {
2211 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
2212 self.tcx.mk_diverging_default());
// Unconstrained integer/float variables fall back to i32/f64 respectively.
2214 match self.type_is_unconstrained_numeric(ty) {
2215 UnconstrainedInt => {
2216 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
2218 UnconstrainedFloat => {
2219 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
// Otherwise, try the user-specified default; the first one whose unification
// fails is the conflicting default we report.
2222 if let Some(default) = default_map.get(ty) {
2223 let default = default.clone();
2224 match self.eq_types(false,
2225 &self.misc(default.origin_span),
2228 Ok(ok) => self.register_infer_ok_obligations(ok),
2230 result = Some(default);
// Drives trait selection to completion for the current body: applies numeric /
// user defaults first, then forces every remaining obligation to resolve,
// reporting fulfillment errors for any that cannot.
// NOTE(review): the `Ok(())` match arm and closing braces are elided in this extraction.
2242 fn select_all_obligations_or_error(&self) {
2243 debug!("select_all_obligations_or_error");
2245 // upvar inference should have ensured that all deferred call
2246 // resolutions are handled by now.
2247 assert!(self.deferred_call_resolutions.borrow().is_empty());
2249 self.select_all_obligations_and_apply_defaults();
2251 let mut fulfillment_cx = self.fulfillment_cx.borrow_mut();
2253 // Steal the deferred obligations before the fulfillment
2254 // context can turn all of them into errors.
2255 let obligations = fulfillment_cx.take_deferred_obligations();
2256 self.deferred_obligations.borrow_mut().extend(obligations);
2258 match fulfillment_cx.select_all_or_error(self) {
2260 Err(errors) => { self.report_fulfillment_errors(&errors); }
2264 /// Select as many obligations as we can at present.
// Unlike `select_all_obligations_or_error`, leaves still-ambiguous obligations
// pending for a later pass; only definite failures are reported here.
// NOTE(review): the `Ok(())` arm and closing braces are elided in this extraction.
2265 fn select_obligations_where_possible(&self) {
2266 match self.fulfillment_cx.borrow_mut().select_where_possible(self) {
2268 Err(errors) => { self.report_fulfillment_errors(&errors); }
2272 /// For the overloaded lvalue expressions (`*x`, `x[3]`), the trait
2273 /// returns a type of `&T`, but the actual type we assign to the
2274 /// *expression* is `T`. So this function just peels off the return
2275 /// type by one layer to yield `T`.
// NOTE(review): the opening/closing braces of this fn are on elided lines.
2276 fn make_overloaded_lvalue_return_type(&self,
2277 method: MethodCallee<'tcx>)
2278 -> ty::TypeAndMut<'tcx>
2280 // extract method return type, which will be &T;
2281 // all LB regions should have been instantiated during method lookup
2282 let ret_ty = method.ty.fn_ret();
// `unwrap` is justified by the invariant above: no late-bound regions remain.
2283 let ret_ty = self.tcx.no_late_bound_regions(&ret_ty).unwrap();
2285 // method returns &T, but the type as visible to user is T, so deref
2286 ret_ty.builtin_deref(true, NoPreference).unwrap()
// Resolves an indexing expression `base_expr[idx]` by autoderefing the base type
// and attempting `try_index_step` at each level; returns the (index, element)
// type pair on success, `None` otherwise.
// NOTE(review): several parameter lines (`expr`, `base_ty`, `idx_ty` — used below)
// and closing braces are elided in this extraction.
2289 fn lookup_indexing(&self,
2291 base_expr: &'gcx hir::Expr,
2294 lvalue_pref: LvaluePreference)
2295 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2297 // FIXME(#18741) -- this is almost but not quite the same as the
2298 // autoderef that normal method probing does. They could likely be
2301 let mut autoderef = self.autoderef(base_expr.span, base_ty);
2303 while let Some((adj_ty, autoderefs)) = autoderef.next() {
2304 if let Some(final_mt) = self.try_index_step(
2305 MethodCall::expr(expr.id),
2306 expr, base_expr, adj_ty, autoderefs,
2307 false, lvalue_pref, idx_ty)
2309 autoderef.finalize(lvalue_pref, Some(base_expr));
2310 return Some(final_mt);
// If the autoderef chain reached a fixed-size array, retry indexing after
// unsizing `[T; N]` to `[T]` (the `true` argument marks the unsize step).
2313 if let ty::TyArray(element_ty, _) = adj_ty.sty {
2314 autoderef.finalize(lvalue_pref, Some(base_expr));
2315 let adjusted_ty = self.tcx.mk_slice(element_ty);
2316 return self.try_index_step(
2317 MethodCall::expr(expr.id), expr, base_expr,
2318 adjusted_ty, autoderefs, true, lvalue_pref, idx_ty);
2321 autoderef.unambiguous_final_ty();
2325 /// To type-check `base_expr[index_expr]`, we progressively autoderef
2326 /// (and otherwise adjust) `base_expr`, looking for a type which either
2327 /// supports builtin indexing or overloaded indexing.
2328 /// This loop implements one step in that search; the autoderef loop
2329 /// is implemented by `lookup_indexing`.
// NOTE(review): several parameter lines (`expr`, `autoderefs`, `unsize`, `index_ty`
// — all referenced below) and various braces/arms are elided in this extraction.
2330 fn try_index_step(&self,
2331 method_call: MethodCall,
2333 base_expr: &'gcx hir::Expr,
2334 adjusted_ty: Ty<'tcx>,
2337 lvalue_pref: LvaluePreference,
2339 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2342 debug!("try_index_step(expr={:?}, base_expr.id={:?}, adjusted_ty={:?}, \
2343 autoderefs={}, unsize={}, index_ty={:?})",
// Fresh type variable used as the `Index`/`IndexMut` type parameter during lookup.
2351 let input_ty = self.next_ty_var(TypeVariableOrigin::AutoDeref(base_expr.span));
2353 // First, try built-in indexing.
2354 match (adjusted_ty.builtin_index(), &index_ty.sty) {
2355 (Some(ty), &ty::TyUint(ast::UintTy::Us)) | (Some(ty), &ty::TyInfer(ty::IntVar(_))) => {
2356 debug!("try_index_step: success, using built-in indexing");
2357 // If we had `[T; N]`, we should've caught it before unsizing to `[T]`.
2359 self.write_autoderef_adjustment(base_expr.id, autoderefs, adjusted_ty);
2360 return Some((tcx.types.usize, ty));
2365 // Try `IndexMut` first, if preferred.
2366 let method = match (lvalue_pref, tcx.lang_items.index_mut_trait()) {
2367 (PreferMutLvalue, Some(trait_did)) => {
2368 self.lookup_method_in_trait_adjusted(expr.span,
2370 Symbol::intern("index_mut"),
2375 Some(vec![input_ty]))
2380 // Otherwise, fall back to `Index`.
2381 let method = match (method, tcx.lang_items.index_trait()) {
2382 (None, Some(trait_did)) => {
2383 self.lookup_method_in_trait_adjusted(expr.span,
2385 Symbol::intern("index"),
2390 Some(vec![input_ty]))
2392 (method, _) => method,
2395 // If some lookup succeeds, write callee into table and extract index/element
2396 // type from the method signature.
2397 // If some lookup succeeded, install method in table
2398 method.map(|method| {
2399 debug!("try_index_step: success, using overloaded indexing");
2400 self.tables.borrow_mut().method_map.insert(method_call, method);
2401 (input_ty, self.make_overloaded_lvalue_return_type(method).ty)
// Checks the arguments of a method call (receiver excluded) against the method's
// signature, delegating to `check_argument_types`. Returns the method's return
// type (on elided lines of the original).
// NOTE(review): the `sp` parameter, the error-path early return, and the
// return-type extraction are on elided lines in this extraction.
2405 fn check_method_argument_types(&self,
2407 method_fn_ty: Ty<'tcx>,
2408 callee_expr: &'gcx hir::Expr,
2409 args_no_rcvr: &'gcx [hir::Expr],
2410 tuple_arguments: TupleArgumentsFlag,
2411 expected: Expectation<'tcx>)
// If the method type is already an error, still check the arguments against
// error types so later passes have types for every expression.
2413 if method_fn_ty.references_error() {
2414 let err_inputs = self.err_args(args_no_rcvr.len());
2416 let err_inputs = match tuple_arguments {
2417 DontTupleArguments => err_inputs,
2418 TupleArguments => vec![self.tcx.intern_tup(&err_inputs[..])],
2421 self.check_argument_types(sp, &err_inputs[..], &[], args_no_rcvr,
2422 false, tuple_arguments, None);
2425 match method_fn_ty.sty {
2426 ty::TyFnDef(def_id, .., ref fty) => {
2427 // HACK(eddyb) ignore self in the definition (see above).
2428 let expected_arg_tys = self.expected_types_for_fn_args(
2432 &fty.sig.0.inputs()[1..]
2434 self.check_argument_types(sp, &fty.sig.0.inputs()[1..], &expected_arg_tys[..],
2435 args_no_rcvr, fty.sig.0.variadic, tuple_arguments,
2436 self.tcx.hir.span_if_local(def_id));
2440 span_bug!(callee_expr.span, "method without bare fn type");
2446 /// Generic function that factors out common logic from function calls,
2447 /// method calls and overloaded operators.
// NOTE(review): many interior lines are elided in this extraction (the `sp`,
// `variadic` parameters, loop headers, match arms, closing braces) — the code
// below is a partial view; verify against the full source before editing.
2448 fn check_argument_types(&self,
2450 fn_inputs: &[Ty<'tcx>],
2451 expected_arg_tys: &[Ty<'tcx>],
2452 args: &'gcx [hir::Expr],
2454 tuple_arguments: TupleArgumentsFlag,
2455 def_span: Option<Span>) {
2458 // Grab the argument types, supplying fresh type variables
2459 // if the wrong number of arguments were supplied
2460 let supplied_arg_count = if tuple_arguments == DontTupleArguments {
2466 // All the input types from the fn signature must outlive the call
2467 // so as to validate implied bounds.
2468 for &fn_input_ty in fn_inputs {
2469 self.register_wf_obligation(fn_input_ty, sp, traits::MiscObligation);
2472 let mut expected_arg_tys = expected_arg_tys;
2473 let expected_arg_count = fn_inputs.len();
// Compute a span covering all the arguments (merging each arg's span in turn)
// for use in arity-mismatch diagnostics.
2475 let sp_args = if args.len() > 0 {
2476 let (first, args) = args.split_at(1);
2477 let mut sp_tmp = first[0].span;
2479 let sp_opt = self.sess().codemap().merge_spans(sp_tmp, arg.span);
2480 if ! sp_opt.is_some() {
2483 sp_tmp = sp_opt.unwrap();
// Local helper: emits the "this function takes N parameters but M were
// supplied" diagnostic (E0057/E0060/E0061), labelling the definition site
// when it is local.
2490 fn parameter_count_error<'tcx>(sess: &Session, sp: Span, expected_count: usize,
2491 arg_count: usize, error_code: &str, variadic: bool,
2492 def_span: Option<Span>) {
2493 let mut err = sess.struct_span_err_with_code(sp,
2494 &format!("this function takes {}{} parameter{} but {} parameter{} supplied",
2495 if variadic {"at least "} else {""},
2497 if expected_count == 1 {""} else {"s"},
2499 if arg_count == 1 {" was"} else {"s were"}),
2502 err.span_label(sp, &format!("expected {}{} parameter{}",
2503 if variadic {"at least "} else {""},
2505 if expected_count == 1 {""} else {"s"}));
2506 if let Some(def_s) = def_span {
2507 err.span_label(def_s, &format!("defined here"));
// For `Fn`-trait style calls the single formal input is a tuple whose element
// types are the per-argument formal types; unpack it (or error with E0057/E0059).
2512 let formal_tys = if tuple_arguments == TupleArguments {
2513 let tuple_type = self.structurally_resolved_type(sp, fn_inputs[0]);
2514 match tuple_type.sty {
2515 ty::TyTuple(arg_types) if arg_types.len() != args.len() => {
2516 parameter_count_error(tcx.sess, sp_args, arg_types.len(), args.len(),
2517 "E0057", false, def_span);
2518 expected_arg_tys = &[];
2519 self.err_args(args.len())
2521 ty::TyTuple(arg_types) => {
2522 expected_arg_tys = match expected_arg_tys.get(0) {
2523 Some(&ty) => match ty.sty {
2524 ty::TyTuple(ref tys) => &tys,
2532 span_err!(tcx.sess, sp, E0059,
2533 "cannot use call notation; the first type parameter \
2534 for the function trait is neither a tuple nor unit");
2535 expected_arg_tys = &[];
2536 self.err_args(args.len())
2539 } else if expected_arg_count == supplied_arg_count {
2541 } else if variadic {
2542 if supplied_arg_count >= expected_arg_count {
2545 parameter_count_error(tcx.sess, sp_args, expected_arg_count,
2546 supplied_arg_count, "E0060", true, def_span);
2547 expected_arg_tys = &[];
2548 self.err_args(supplied_arg_count)
2551 parameter_count_error(tcx.sess, sp_args, expected_arg_count,
2552 supplied_arg_count, "E0061", false, def_span);
2553 expected_arg_tys = &[];
2554 self.err_args(supplied_arg_count)
2557 debug!("check_argument_types: formal_tys={:?}",
2558 formal_tys.iter().map(|t| self.ty_to_string(*t)).collect::<Vec<String>>());
2560 // Check the arguments.
2561 // We do this in a pretty awful way: first we typecheck any arguments
2562 // that are not closures, then we typecheck the closures. This is so
2563 // that we have more information about the types of arguments when we
2564 // typecheck the functions. This isn't really the right way to do this.
2565 for &check_closures in &[false, true] {
2566 debug!("check_closures={}", check_closures);
2568 // More awful hacks: before we check argument types, try to do
2569 // an "opportunistic" vtable resolution of any trait bounds on
2570 // the call. This helps coercions.
2572 self.select_obligations_where_possible();
2575 // For variadic functions, we don't have a declared type for all of
2576 // the arguments hence we only do our usual type checking with
2577 // the arguments whose types we do know.
2578 let t = if variadic {
2580 } else if tuple_arguments == TupleArguments {
2585 for (i, arg) in args.iter().take(t).enumerate() {
2586 // Warn only for the first loop (the "no closures" one).
2587 // Closure arguments themselves can't be diverging, but
2588 // a previous argument can, e.g. `foo(panic!(), || {})`.
2589 if !check_closures {
2590 self.warn_if_unreachable(arg.id, arg.span, "expression");
2593 let is_closure = match arg.node {
2594 hir::ExprClosure(..) => true,
2598 if is_closure != check_closures {
2602 debug!("checking the argument");
2603 let formal_ty = formal_tys[i];
2605 // The special-cased logic below has three functions:
2606 // 1. Provide as good of an expected type as possible.
2607 let expected = expected_arg_tys.get(i).map(|&ty| {
2608 Expectation::rvalue_hint(self, ty)
2611 let checked_ty = self.check_expr_with_expectation(&arg,
2612 expected.unwrap_or(ExpectHasType(formal_ty)));
2613 // 2. Coerce to the most detailed type that could be coerced
2614 // to, which is `expected_ty` if `rvalue_hint` returns an
2615 // `ExpectHasType(expected_ty)`, or the `formal_ty` otherwise.
2616 let coerce_ty = expected.and_then(|e| e.only_has_type(self));
2617 self.demand_coerce(&arg, checked_ty, coerce_ty.unwrap_or(formal_ty));
2619 // 3. Relate the expected type and the formal one,
2620 // if the expected type was used for the coercion.
2621 coerce_ty.map(|ty| self.demand_suptype(arg.span, formal_ty, ty));
2625 // We also need to make sure we at least write the ty of the other
2626 // arguments which we skipped above.
2628 for arg in args.iter().skip(expected_arg_count) {
2629 let arg_ty = self.check_expr(&arg);
2631 // There are a few types which get autopromoted when passed via varargs
2632 // in C but we just error out instead and require explicit casts.
2633 let arg_ty = self.structurally_resolved_type(arg.span,
2636 ty::TyFloat(ast::FloatTy::F32) => {
2637 self.type_error_message(arg.span, |t| {
2638 format!("can't pass an `{}` to variadic \
2639 function, cast to `c_double`", t)
2642 ty::TyInt(ast::IntTy::I8) | ty::TyInt(ast::IntTy::I16) | ty::TyBool => {
2643 self.type_error_message(arg.span, |t| {
2644 format!("can't pass `{}` to variadic \
2645 function, cast to `c_int`",
2649 ty::TyUint(ast::UintTy::U8) | ty::TyUint(ast::UintTy::U16) => {
2650 self.type_error_message(arg.span, |t| {
2651 format!("can't pass `{}` to variadic \
2652 function, cast to `c_uint`",
2656 ty::TyFnDef(.., f) => {
2657 let ptr_ty = self.tcx.mk_fn_ptr(f);
2658 let ptr_ty = self.resolve_type_vars_if_possible(&ptr_ty);
2659 self.type_error_message(arg.span,
2661 format!("can't pass `{}` to variadic \
2662 function, cast to `{}`", t, ptr_ty)
// Produces `len` copies of the error type, used as placeholder argument types
// when a call is already known to be erroneous.
2671 fn err_args(&self, len: usize) -> Vec<Ty<'tcx>> {
2672 (0..len).map(|_| self.tcx.types.err).collect()
2675 // AST fragment checking
// NOTE(review): the function header (presumably `fn check_lit(&self, lit: &ast::Lit, ...)`)
// is on elided lines; only the trailing `expected` parameter and the body's match
// over literal kinds are visible. Returns the type of the literal expression.
2678 expected: Expectation<'tcx>)
2684 ast::LitKind::Str(..) => tcx.mk_static_str(),
2685 ast::LitKind::ByteStr(ref v) => {
// Byte-string literals are `&'static [u8; N]`.
2686 tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic),
2687 tcx.mk_array(tcx.types.u8, v.len()))
2689 ast::LitKind::Byte(_) => tcx.types.u8,
2690 ast::LitKind::Char(_) => tcx.types.char,
2691 ast::LitKind::Int(_, ast::LitIntType::Signed(t)) => tcx.mk_mach_int(t),
2692 ast::LitKind::Int(_, ast::LitIntType::Unsigned(t)) => tcx.mk_mach_uint(t),
2693 ast::LitKind::Int(_, ast::LitIntType::Unsuffixed) => {
// Unsuffixed integers take their type from the expectation when it is an
// integral-friendly type, otherwise a fresh integer inference variable.
2694 let opt_ty = expected.to_option(self).and_then(|ty| {
2696 ty::TyInt(_) | ty::TyUint(_) => Some(ty),
2697 ty::TyChar => Some(tcx.types.u8),
2698 ty::TyRawPtr(..) => Some(tcx.types.usize),
2699 ty::TyFnDef(..) | ty::TyFnPtr(_) => Some(tcx.types.usize),
2703 opt_ty.unwrap_or_else(
2704 || tcx.mk_int_var(self.next_int_var_id()))
2706 ast::LitKind::Float(_, t) => tcx.mk_mach_float(t),
2707 ast::LitKind::FloatUnsuffixed(_) => {
2708 let opt_ty = expected.to_option(self).and_then(|ty| {
2710 ty::TyFloat(_) => Some(ty),
2714 opt_ty.unwrap_or_else(
2715 || tcx.mk_float_var(self.next_float_var_id()))
2717 ast::LitKind::Bool(_) => tcx.types.bool
// Checks `expr` with `expected` as a hint and then demands that the resulting
// type be exactly equal to `expected`.
2721 fn check_expr_eq_type(&self,
2722 expr: &'gcx hir::Expr,
2723 expected: Ty<'tcx>) {
2724 let ty = self.check_expr_with_hint(expr, expected);
2725 self.demand_eqtype(expr.span, expected, ty);
// Checks `expr` with `expected` as a hint and demands that the resulting type
// be a subtype of `expected`; returns the expression's type (return on an
// elided line in this extraction).
2728 pub fn check_expr_has_type(&self,
2729 expr: &'gcx hir::Expr,
2730 expected: Ty<'tcx>) -> Ty<'tcx> {
2731 let ty = self.check_expr_with_hint(expr, expected);
2732 self.demand_suptype(expr.span, expected, ty);
// Checks `expr` with `expected` as a hint and demands that the resulting type
// be coercible to `expected` (weaker than subtyping: allows e.g. unsizing).
2736 fn check_expr_coercable_to_type(&self,
2737 expr: &'gcx hir::Expr,
2738 expected: Ty<'tcx>) -> Ty<'tcx> {
2739 let ty = self.check_expr_with_hint(expr, expected);
2740 self.demand_coerce(expr, ty, expected);
// Convenience wrapper: check `expr` with an `ExpectHasType` expectation.
2744 fn check_expr_with_hint(&self, expr: &'gcx hir::Expr,
2745 expected: Ty<'tcx>) -> Ty<'tcx> {
2746 self.check_expr_with_expectation(expr, ExpectHasType(expected))
// Convenience wrapper: check `expr` with an expectation but no lvalue preference.
2749 fn check_expr_with_expectation(&self,
2750 expr: &'gcx hir::Expr,
2751 expected: Expectation<'tcx>) -> Ty<'tcx> {
2752 self.check_expr_with_expectation_and_lvalue_pref(expr, expected, NoPreference)
// Convenience wrapper: check `expr` with no expectation at all.
2755 fn check_expr(&self, expr: &'gcx hir::Expr) -> Ty<'tcx> {
2756 self.check_expr_with_expectation(expr, NoExpectation)
// Convenience wrapper: check `expr` with no expectation but a specific
// lvalue preference (mutable vs. immutable usage of the result).
2759 fn check_expr_with_lvalue_pref(&self, expr: &'gcx hir::Expr,
2760 lvalue_pref: LvaluePreference) -> Ty<'tcx> {
2761 self.check_expr_with_expectation_and_lvalue_pref(expr, NoExpectation, lvalue_pref)
2764 // determine the `self` type, using fresh variables for all variables
2765 // declared on the impl declaration e.g., `impl<A,B> for Vec<(A,B)>`
2766 // would return ($0, $1) where $0 and $1 are freshly instantiated type
// NOTE(review): the `did: DefId` parameter line is elided in this extraction.
2768 pub fn impl_self_ty(&self,
2769 span: Span, // (potential) receiver for this impl
2771 -> TypeAndSubsts<'tcx> {
2772 let ity = self.tcx.item_type(did);
2773 debug!("impl_self_ty: ity={:?}", ity);
// Substitute fresh inference variables for the impl's generic parameters.
2775 let substs = self.fresh_substs_for_item(span, did);
2776 let substd_ty = self.instantiate_type_scheme(span, &substs, &ity);
2778 TypeAndSubsts { substs: substs, ty: substd_ty }
2781 /// Unifies the return type with the expected type early, for more coercions
2782 /// and forward type information on the argument expressions.
// NOTE(review): the `call_span` parameter line and some match/closing lines are
// elided in this extraction.
2783 fn expected_types_for_fn_args(&self,
2785 expected_ret: Expectation<'tcx>,
2786 formal_ret: Ty<'tcx>,
2787 formal_args: &[Ty<'tcx>])
2789 let expected_args = expected_ret.only_has_type(self).and_then(|ret_ty| {
// `fudge_regions_if_ok` lets us speculate: region constraints created here are
// replaced with fresh variables if the inner subtyping succeeds.
2790 self.fudge_regions_if_ok(&RegionVariableOrigin::Coercion(call_span), || {
2791 // Attempt to apply a subtyping relationship between the formal
2792 // return type (likely containing type variables if the function
2793 // is polymorphic) and the expected return type.
2794 // No argument expectations are produced if unification fails.
2795 let origin = self.misc(call_span);
2796 let ures = self.sub_types(false, &origin, formal_ret, ret_ty);
2797 // FIXME(#15760) can't use try! here, FromError doesn't default
2798 // to identity so the resulting type is not constrained.
2800 Ok(ok) => self.register_infer_ok_obligations(ok),
2801 Err(e) => return Err(e),
2804 // Record all the argument types, with the substitutions
2805 // produced from the above subtyping unification.
2806 Ok(formal_args.iter().map(|ty| {
2807 self.resolve_type_vars_if_possible(ty)
2810 }).unwrap_or(vec![]);
2811 debug!("expected_types_for_fn_args(formal={:?} -> {:?}, expected={:?} -> {:?})",
2812 formal_args, formal_ret,
2813 expected_args, expected_ret);
2817 // Checks a method call.
// Type-checks `receiver.method::<tps>(args...)`: checks the receiver, resolves
// the method, records it in the method map, then delegates argument checking to
// `check_method_argument_types`.
// NOTE(review): the `tps` parameter line, the `Ok`/`Err` match structure around
// `lookup_method`, and the tail of the function are on elided lines here.
2818 fn check_method_call(&self,
2819 expr: &'gcx hir::Expr,
2820 method_name: Spanned<ast::Name>,
2821 args: &'gcx [hir::Expr],
2823 expected: Expectation<'tcx>,
2824 lvalue_pref: LvaluePreference) -> Ty<'tcx> {
// By convention the receiver is the first element of `args`.
2825 let rcvr = &args[0];
2826 let rcvr_t = self.check_expr_with_lvalue_pref(&rcvr, lvalue_pref);
2828 // no need to check for bot/err -- callee does that
2829 let expr_t = self.structurally_resolved_type(expr.span, rcvr_t);
2831 let tps = tps.iter().map(|ast_ty| self.to_ty(&ast_ty)).collect::<Vec<_>>();
2832 let fn_ty = match self.lookup_method(method_name.span,
2839 let method_ty = method.ty;
2840 let method_call = MethodCall::expr(expr.id);
2841 self.tables.borrow_mut().method_map.insert(method_call, method);
// On lookup failure, report (unless the name is the error placeholder) and
// taint the expression with the error type.
2845 if method_name.node != keywords::Invalid.name() {
2846 self.report_method_error(method_name.span,
2853 self.write_error(expr.id);
2858 // Call the generic checker.
2859 let ret_ty = self.check_method_argument_types(method_name.span, fn_ty,
2867 // A generic function for checking the then and else in an if
// Computes the type of an `if`/`else`: checks the condition as `bool`, checks
// both arms with divergence tracked separately, and unifies the arm types
// (LUB-coercing when there is an `else`, demanding `()` when there is not).
// NOTE(review): the `sp` parameter line and several closing/else lines are
// elided in this extraction.
2869 fn check_then_else(&self,
2870 cond_expr: &'gcx hir::Expr,
2871 then_blk: &'gcx hir::Block,
2872 opt_else_expr: Option<&'gcx hir::Expr>,
2874 expected: Expectation<'tcx>) -> Ty<'tcx> {
2875 let cond_ty = self.check_expr_has_type(cond_expr, self.tcx.types.bool)
2876 let cond_diverges = self.diverges.get();
// Reset divergence so each arm's divergence is measured independently.
2877 self.diverges.set(Diverges::Maybe);
2879 let expected = expected.adjust_for_branches(self);
2880 let then_ty = self.check_block_with_expected(then_blk, expected);
2881 let then_diverges = self.diverges.get();
2882 self.diverges.set(Diverges::Maybe);
2884 let unit = self.tcx.mk_nil();
2885 let (cause, expected_ty, found_ty, result);
2886 if let Some(else_expr) = opt_else_expr {
2887 let else_ty = self.check_expr_with_expectation(else_expr, expected);
2888 let else_diverges = self.diverges.get();
2889 cause = self.cause(sp, ObligationCauseCode::IfExpression);
2891 // Only try to coerce-unify if we have a then expression
2892 // to assign coercions to, otherwise it's () or diverging.
2893 expected_ty = then_ty;
2895 result = if let Some(ref then) = then_blk.expr {
2896 let res = self.try_find_coercion_lub(&cause, || Some(&**then),
2897 then_ty, else_expr, else_ty);
2899 // In case we did perform an adjustment, we have to update
2900 // the type of the block, because old trans still uses it.
2902 let adj = self.tables.borrow().adjustments.get(&then.id).cloned();
2903 if let Some(adj) = adj {
2904 self.write_ty(then_blk.id, adj.target);
// No tail expression in the `then` block: unify the two arm types directly
// inside a snapshot so failure leaves no stray constraints.
2910 self.commit_if_ok(|_| {
2911 let trace = TypeTrace::types(&cause, true, then_ty, else_ty);
2912 self.lub(true, trace, &then_ty, &else_ty)
2913 .map(|ok| self.register_infer_ok_obligations(ok))
2917 // We won't diverge unless both branches do (or the condition does).
2918 self.diverges.set(cond_diverges | then_diverges & else_diverges);
2920 // If the condition is false we can't diverge.
2921 self.diverges.set(cond_diverges);
2923 cause = self.cause(sp, ObligationCauseCode::IfExpressionWithNoElse);
// With no `else`, the `then` type must be `()`.
2926 result = self.eq_types(true, &cause, unit, then_ty)
2928 self.register_infer_ok_obligations(ok);
2935 if cond_ty.references_error() {
2942 self.report_mismatched_types(&cause, expected_ty, found_ty, e).emit();
2948 // Check field access expressions
// Type-checks `base.field`: autoderefs the base looking for a struct with a
// field of that name; on failure, emits privacy, method-vs-field, suggestion,
// or unknown-field diagnostics as appropriate.
// NOTE(review): closing braces and some early-return lines are elided in this
// extraction.
2949 fn check_field(&self,
2950 expr: &'gcx hir::Expr,
2951 lvalue_pref: LvaluePreference,
2952 base: &'gcx hir::Expr,
2953 field: &Spanned<ast::Name>) -> Ty<'tcx> {
2954 let expr_t = self.check_expr_with_lvalue_pref(base, lvalue_pref);
2955 let expr_t = self.structurally_resolved_type(expr.span,
2957 let mut private_candidate = None;
2958 let mut autoderef = self.autoderef(expr.span, expr_t);
2959 while let Some((base_t, autoderefs)) = autoderef.next() {
2961 ty::TyAdt(base_def, substs) if !base_def.is_enum() => {
2962 debug!("struct named {:?}", base_t);
2963 if let Some(field) = base_def.struct_variant().find_field_named(field.node) {
2964 let field_ty = self.field_ty(expr.span, field, substs);
// Accessible field: record the autoderef adjustment and stability check.
2965 if self.tcx.vis_is_accessible_from(field.vis, self.body_id) {
2966 autoderef.finalize(lvalue_pref, Some(base));
2967 self.write_autoderef_adjustment(base.id, autoderefs, base_t);
2969 self.tcx.check_stability(field.did, expr.id, expr.span);
// Remember an inaccessible match so we can report a privacy error below.
2973 private_candidate = Some((base_def.did, field_ty));
2979 autoderef.unambiguous_final_ty();
2981 if let Some((did, field_ty)) = private_candidate {
2982 let struct_path = self.tcx().item_path_str(did);
2983 let msg = format!("field `{}` of struct `{}` is private", field.node, struct_path);
2984 let mut err = self.tcx().sess.struct_span_err(expr.span, &msg);
2985 // Also check if an accessible method exists, which is often what is meant.
2986 if self.method_exists(field.span, field.node, expr_t, expr.id, false) {
2987 err.note(&format!("a method `{}` also exists, perhaps you wish to call it",
2992 } else if field.node == keywords::Invalid.name() {
2993 self.tcx().types.err
2994 } else if self.method_exists(field.span, field.node, expr_t, expr.id, true) {
2995 self.type_error_struct(field.span, |actual| {
2996 format!("attempted to take value of method `{}` on type \
2997 `{}`", field.node, actual)
2999 .help("maybe a `()` to call it is missing? \
3000 If not, try an anonymous function")
3002 self.tcx().types.err
// No field, no method: emit "no field" with a did-you-mean suggestion where
// possible.
3004 let mut err = self.type_error_struct(field.span, |actual| {
3005 format!("no field `{}` on type `{}`",
3009 ty::TyAdt(def, _) if !def.is_enum() => {
3010 if let Some(suggested_field_name) =
3011 Self::suggest_field_name(def.struct_variant(), field, vec![]) {
3012 err.span_label(field.span,
3013 &format!("did you mean `{}`?", suggested_field_name));
3015 err.span_label(field.span,
3016 &format!("unknown field"));
3019 ty::TyRawPtr(..) => {
3020 err.note(&format!("`{0}` is a native pointer; perhaps you need to deref with \
3022 self.tcx.hir.node_to_pretty_string(base.id),
3028 self.tcx().types.err
3032 // Returns a hint about the closest match in field names
// Candidates in `skip` (already-specified fields) and private fields from other
// crates are excluded; matching requires at least one common letter.
3033 fn suggest_field_name(variant: &'tcx ty::VariantDef,
3034 field: &Spanned<ast::Name>,
3035 skip : Vec<InternedString>)
3037 let name = field.node.as_str();
3038 let names = variant.fields.iter().filter_map(|field| {
3039 // ignore already set fields and private fields from non-local crates
3040 if skip.iter().any(|x| *x == field.name.as_str()) ||
3041 (variant.did.krate != LOCAL_CRATE && field.vis != Visibility::Public) {
3048 // only find fits with at least one matching letter
3049 find_best_match_for_name(names, &name, Some(name.len()))
3052 // Check tuple index expressions
// Type-checks `base.N`: autoderefs the base looking for a tuple or tuple-struct
// with an `N`th field; reports privacy, out-of-bounds, or not-a-tuple errors.
// NOTE(review): closing braces, the `Some(field_ty)` return, and parts of the
// final error message are on elided lines in this extraction.
3053 fn check_tup_field(&self,
3054 expr: &'gcx hir::Expr,
3055 lvalue_pref: LvaluePreference,
3056 base: &'gcx hir::Expr,
3057 idx: codemap::Spanned<usize>) -> Ty<'tcx> {
3058 let expr_t = self.check_expr_with_lvalue_pref(base, lvalue_pref);
3059 let expr_t = self.structurally_resolved_type(expr.span,
3061 let mut private_candidate = None;
// `tuple_like` distinguishes "index out of bounds" from "not a tuple at all"
// in the final diagnostic.
3062 let mut tuple_like = false;
3063 let mut autoderef = self.autoderef(expr.span, expr_t);
3064 while let Some((base_t, autoderefs)) = autoderef.next() {
3065 let field = match base_t.sty {
3066 ty::TyAdt(base_def, substs) if base_def.is_struct() => {
3067 tuple_like = base_def.struct_variant().ctor_kind == CtorKind::Fn;
3068 if !tuple_like { continue }
3070 debug!("tuple struct named {:?}", base_t);
3071 base_def.struct_variant().fields.get(idx.node).and_then(|field| {
3072 let field_ty = self.field_ty(expr.span, field, substs);
3073 private_candidate = Some((base_def.did, field_ty));
3074 if self.tcx.vis_is_accessible_from(field.vis, self.body_id) {
3075 self.tcx.check_stability(field.did, expr.id, expr.span);
3082 ty::TyTuple(ref v) => {
3084 v.get(idx.node).cloned()
3089 if let Some(field_ty) = field {
3090 autoderef.finalize(lvalue_pref, Some(base));
3091 self.write_autoderef_adjustment(base.id, autoderefs, base_t);
3095 autoderef.unambiguous_final_ty();
3097 if let Some((did, field_ty)) = private_candidate {
3098 let struct_path = self.tcx().item_path_str(did);
3099 let msg = format!("field `{}` of struct `{}` is private", idx.node, struct_path);
3100 self.tcx().sess.span_err(expr.span, &msg);
3104 self.type_error_message(
3108 format!("attempted out-of-bounds tuple index `{}` on \
3113 format!("attempted tuple index `{}` on type `{}`, but the \
3114 type was not a tuple or tuple struct",
3121 self.tcx().types.err
// Emits E0559 (enum variant) or E0560 (struct/union) for a struct-literal field
// that does not exist on the target variant, with a did-you-mean suggestion
// drawn from the variant's remaining fields.
// NOTE(review): the `ty`, `field`, and `kind_name` parameter lines and some
// match/label lines are elided in this extraction.
3124 fn report_unknown_field(&self,
3126 variant: &'tcx ty::VariantDef,
3128 skip_fields: &[hir::Field],
3130 let mut err = self.type_error_struct_with_diag(
3132 |actual| match ty.sty {
3133 ty::TyAdt(adt, ..) if adt.is_enum() => {
3134 struct_span_err!(self.tcx.sess, field.name.span, E0559,
3135 "{} `{}::{}` has no field named `{}`",
3136 kind_name, actual, variant.name, field.name.node)
3139 struct_span_err!(self.tcx.sess, field.name.span, E0560,
3140 "{} `{}` has no field named `{}`",
3141 kind_name, actual, field.name.node)
3145 // prevent all specified fields from being suggested
3146 let skip_fields = skip_fields.iter().map(|ref x| x.name.node.as_str());
3147 if let Some(field_name) = Self::suggest_field_name(variant,
3149 skip_fields.collect()) {
3150 err.span_label(field.name.span,
3151 &format!("field does not exist - did you mean `{}`?", field_name));
3154 ty::TyAdt(adt, ..) if adt.is_enum() => {
3155 err.span_label(field.name.span, &format!("`{}::{}` does not have this field",
3159 err.span_label(field.name.span, &format!("`{}` does not have this field", ty));
// Type-checks the field initializers of a struct/variant/union literal:
// checks each initializer against the declared field type, reports duplicate
// and unknown fields, and (unless a functional-update base covers them)
// reports missing fields via E0063.
// NOTE(review): the `adt_ty` and `span` parameter lines, plus various closing
// braces, are elided in this extraction.
3166 fn check_expr_struct_fields(&self,
3168 expr_id: ast::NodeId,
3170 variant: &'tcx ty::VariantDef,
3171 ast_fields: &'gcx [hir::Field],
3172 check_completeness: bool) {
3174 let (substs, adt_kind, kind_name) = match adt_ty.sty {
3175 ty::TyAdt(adt, substs) => (substs, adt.adt_kind(), adt.variant_descr()),
3176 _ => span_bug!(span, "non-ADT passed to check_expr_struct_fields")
// `remaining_fields` tracks declared fields not yet seen in the literal.
3179 let mut remaining_fields = FxHashMap();
3180 for field in &variant.fields {
3181 remaining_fields.insert(field.name, field);
// `seen_fields` maps a field name to its first-use span, for duplicate errors.
3184 let mut seen_fields = FxHashMap();
3186 let mut error_happened = false;
3188 // Typecheck each field.
3189 for field in ast_fields {
3190 let expected_field_type;
3192 if let Some(v_field) = remaining_fields.remove(&field.name.node) {
3193 expected_field_type = self.field_ty(field.span, v_field, substs);
3195 seen_fields.insert(field.name.node, field.span);
3197 // we don't look at stability attributes on
3198 // struct-like enums (yet...), but it's definitely not
3199 // a bug to construct one.
3200 if adt_kind != ty::AdtKind::Enum {
3201 tcx.check_stability(v_field.did, expr_id, field.span);
// Field not in `remaining_fields`: either specified twice (E0062-style
// duplicate) or unknown on this variant.
3204 error_happened = true;
3205 expected_field_type = tcx.types.err;
3206 if let Some(_) = variant.find_field_named(field.name.node) {
3207 let mut err = struct_span_err!(self.tcx.sess,
3210 "field `{}` specified more than once",
3213 err.span_label(field.name.span, &format!("used more than once"));
3215 if let Some(prev_span) = seen_fields.get(&field.name.node) {
3216 err.span_label(*prev_span, &format!("first use of `{}`", field.name.node));
3221 self.report_unknown_field(adt_ty, variant, field, ast_fields, kind_name);
3225 // Make sure to give a type to the field even if there's
3226 // an error, so we can continue typechecking
3227 self.check_expr_coercable_to_type(&field.expr, expected_field_type);
3230 // Make sure the programmer specified correct number of fields.
3231 if kind_name == "union" {
3232 if ast_fields.len() != 1 {
3233 tcx.sess.span_err(span, "union expressions should have exactly one field");
3235 } else if check_completeness && !error_happened && !remaining_fields.is_empty() {
3236 let len = remaining_fields.len();
3238 let mut displayable_field_names = remaining_fields
3240 .map(|x| x.as_str())
3241 .collect::<Vec<_>>();
// Sort so the diagnostic output is deterministic.
3243 displayable_field_names.sort();
// Show at most three missing field names; summarize the rest as a count.
3245 let truncated_fields_error = if len <= 3 {
3248 format!(" and {} other field{}", (len - 3), if len - 3 == 1 {""} else {"s"})
3251 let remaining_fields_names = displayable_field_names.iter().take(3)
3252 .map(|n| format!("`{}`", n))
3253 .collect::<Vec<_>>()
3256 struct_span_err!(tcx.sess, span, E0063,
3257 "missing field{} {}{} in initializer of `{}`",
3258 if remaining_fields.len() == 1 {""} else {"s"},
3259 remaining_fields_names,
3260 truncated_fields_error,
3262 .span_label(span, &format!("missing {}{}",
3263 remaining_fields_names,
3264 truncated_fields_error))
// Recovery path for an erroneous struct literal: still type-check every field
// initializer (and the functional-update base, on elided lines) so later
// passes have types for all subexpressions.
3269 fn check_struct_fields_on_error(&self,
3270 fields: &'gcx [hir::Field],
3271 base_expr: &'gcx Option<P<hir::Expr>>) {
3272 for field in fields {
3273 self.check_expr(&field.expr);
3277 self.check_expr(&base);
// Resolve the path of a struct expression/pattern (`S { .. }`, `E::V { .. }`)
// to a concrete variant definition and its type. Returns `None` (after
// emitting E0071 or tainting with errors) when the path does not name a
// struct, union, or struct-like enum variant. On success, also registers the
// obligations implied by the bounds on the path's type arguments.
// NOTE(review): several interior lines are missing from this excerpt (e.g. the
// match scrutinees around 3298 and 3307, and the `else` around 3325).
3283 pub fn check_struct_path(&self,
3285 node_id: ast::NodeId)
3286 -> Option<(&'tcx ty::VariantDef, Ty<'tcx>)> {
// The span used for all diagnostics below: the resolved path itself, or the
// self-type of a type-relative path (`<T>::Variant`).
3287 let path_span = match *qpath {
3288 hir::QPath::Resolved(_, ref path) => path.span,
3289 hir::QPath::TypeRelative(ref qself, _) => qself.span
3291 let (def, ty) = self.finish_resolving_struct_path(qpath, path_span, node_id);
3292 let variant = match def {
// Resolution already failed elsewhere; just propagate the taint.
3294 self.set_tainted_by_errors();
// An enum variant path: the resolved type must be the enum's ADT.
3297 Def::Variant(..) => {
3299 ty::TyAdt(adt, substs) => {
3300 Some((adt.variant_of_def(def), adt.did, substs))
3302 _ => bug!("unexpected type: {:?}", ty.sty)
// Struct/union (possibly through an alias or `Self`): only a non-enum ADT
// qualifies; its single "struct variant" is the one being constructed.
3305 Def::Struct(..) | Def::Union(..) | Def::TyAlias(..) |
3306 Def::AssociatedTy(..) | Def::SelfTy(..) => {
3308 ty::TyAdt(adt, substs) if !adt.is_enum() => {
3309 Some((adt.struct_variant(), adt.did, substs))
3314 _ => bug!("unexpected definition: {:?}", def)
3317 if let Some((variant, did, substs)) = variant {
3318 // Check bounds on type arguments used in the path.
3319 let bounds = self.instantiate_bounds(path_span, did, substs);
3320 let cause = traits::ObligationCause::new(path_span, self.body_id,
3321 traits::ItemObligation(did));
3322 self.add_obligations_for_parameters(cause, &bounds);
// Otherwise the path named something that cannot be built with `{ .. }`.
3326 struct_span_err!(self.tcx.sess, path_span, E0071,
3327 "expected struct, variant or union type, found {}",
3328 ty.sort_string(self.tcx))
3329 .span_label(path_span, &format!("not a struct"))
// Type-check a struct expression `Path { field: expr, .., base }`:
// resolve the variant, check the listed fields, and — if a functional-record-
// update base is present — check it against the struct type and record the
// types of the fields the base supplies (`fru_field_types`). Returns the
// struct's type, or `err` when path resolution failed.
3335 fn check_expr_struct(&self,
3338 fields: &'gcx [hir::Field],
3339 base_expr: &'gcx Option<P<hir::Expr>>) -> Ty<'tcx>
3341 // Find the relevant variant
3342 let (variant, struct_ty) =
3343 if let Some(variant_ty) = self.check_struct_path(qpath, expr.id) {
// Path didn't resolve to a constructible type: recover by checking the
// field expressions anyway, then bail out with the error type.
3346 self.check_struct_fields_on_error(fields, base_expr);
3347 return self.tcx.types.err;
3350 let path_span = match *qpath {
3351 hir::QPath::Resolved(_, ref path) => path.span,
3352 hir::QPath::TypeRelative(ref qself, _) => qself.span
// `base_expr.is_none()` doubles as the completeness check: without a base,
// every field must be listed explicitly.
3355 self.check_expr_struct_fields(struct_ty, expr.id, path_span, variant, fields,
3356 base_expr.is_none());
3357 if let &Some(ref base_expr) = base_expr {
3358 self.check_expr_has_type(base_expr, struct_ty);
3359 match struct_ty.sty {
3360 ty::TyAdt(adt, substs) if adt.is_struct() => {
// Record the (normalized) type of every field so writeback/borrowck
// know what the `..base` part contributes.
3361 self.tables.borrow_mut().fru_field_types.insert(
3363 adt.struct_variant().fields.iter().map(|f| {
3364 self.normalize_associated_types_in(
3365 expr.span, &f.ty(self.tcx, substs)
// FRU (`..base`) is only legal on plain structs — E0436 otherwise.
3371 span_err!(self.tcx.sess, base_expr.span, E0436,
3372 "functional record update syntax requires a struct");
// The constructed value is created by value, so its type must be Sized.
3376 self.require_type_is_sized(struct_ty, expr.span, traits::StructInitializerSized);
3382 /// If an expression has any sub-expressions that result in a type error,
3383 /// inspecting that expression's type with `ty.references_error()` will return
3384 /// true. Likewise, if an expression is known to diverge, inspecting its
3385 /// type with `ty::type_is_bot` will return true (n.b.: since Rust is
3386 /// strict, _|_ can appear in the type of an expression that does not,
3387 /// itself, diverge: for example, fn() -> _|_.)
3388 /// Note that inspecting a type's structure *directly* may expose the fact
3389 /// that there are actually multiple representations for `TyError`, so avoid
3390 /// that when err needs to be handled differently.
// The central expression-checking entry point: wraps `check_expr_kind` with
// unreachability warnings, diverges/has_errors bookkeeping, type recording,
// and a never-to-any adjustment for `!`-typed expressions.
3391 fn check_expr_with_expectation_and_lvalue_pref(&self,
3392 expr: &'gcx hir::Expr,
3393 expected: Expectation<'tcx>,
3394 lvalue_pref: LvaluePreference) -> Ty<'tcx> {
3395 debug!(">> typechecking: expr={:?} expected={:?}",
3398 // Warn for expressions after diverging siblings.
3399 self.warn_if_unreachable(expr.id, expr.span, "expression");
3401 // Hide the outer diverging and has_errors flags.
3402 let old_diverges = self.diverges.get();
3403 let old_has_errors = self.has_errors.get();
3404 self.diverges.set(Diverges::Maybe);
3405 self.has_errors.set(false);
3407 let ty = self.check_expr_kind(expr, expected, lvalue_pref);
3409 // Warn for non-block expressions with diverging children.
// Block-like expressions are exempt: their interior already got the warning.
3412 hir::ExprLoop(..) | hir::ExprWhile(..) |
3413 hir::ExprIf(..) | hir::ExprMatch(..) => {}
3415 _ => self.warn_if_unreachable(expr.id, expr.span, "expression")
3418 // Record the type, which applies it effects.
3419 // We need to do this after the warning above, so that
3420 // we don't warn for the diverging expression itself.
3421 self.write_ty(expr.id, ty);
3423 // Combine the diverging and has_error flags.
3424 self.diverges.set(self.diverges.get() | old_diverges);
3425 self.has_errors.set(self.has_errors.get() | old_has_errors);
3427 debug!("type of {} is...", self.tcx.hir.node_to_string(expr.id));
3428 debug!("... {:?}, expected is {:?}", ty, expected);
3430 // Add adjustments to !-expressions
// NOTE(review): the guard on line 3431 (presumably `if ty.is_never() ...`) is
// missing from this excerpt — confirm before relying on this description.
3432 if let Some(hir::map::NodeExpr(node_expr)) = self.tcx.hir.find(expr.id) {
// A `!`-typed expression can coerce to any type; record that as an
// explicit NeverToAny adjustment to a fresh diverging type variable.
3433 let adj_ty = self.next_diverging_ty_var(
3434 TypeVariableOrigin::AdjustmentType(node_expr.span));
3435 self.write_adjustment(expr.id, adjustment::Adjustment {
3436 kind: adjustment::Adjust::NeverToAny,
// The big dispatch: one match arm per `hir::Expr_` variant, each computing
// the expression's type (possibly `tcx.types.err` on failure). Called only
// from `check_expr_with_expectation_and_lvalue_pref`, which handles the
// surrounding bookkeeping.
// NOTE(review): many interior lines of this function are missing from this
// excerpt; comments below describe only what the visible lines establish.
3445 fn check_expr_kind(&self,
3446 expr: &'gcx hir::Expr,
3447 expected: Expectation<'tcx>,
3448 lvalue_pref: LvaluePreference) -> Ty<'tcx> {
// `box subexpr`: push the expectation through the box to the contents.
3452 hir::ExprBox(ref subexpr) => {
3453 let expected_inner = expected.to_option(self).map_or(NoExpectation, |ty| {
3455 ty::TyBox(ty) => Expectation::rvalue_hint(self, ty),
3459 let referent_ty = self.check_expr_with_expectation(subexpr, expected_inner);
3460 tcx.mk_box(referent_ty)
3463 hir::ExprLit(ref lit) => {
3464 self.check_lit(&lit, expected)
3466 hir::ExprBinary(op, ref lhs, ref rhs) => {
3467 self.check_binop(expr, op, lhs, rhs)
3469 hir::ExprAssignOp(op, ref lhs, ref rhs) => {
3470 self.check_binop_assign(expr, op, lhs, rhs)
// Unary operators: `!`, `-` (may be overloaded) and `*` (deref).
3472 hir::ExprUnary(unop, ref oprnd) => {
3473 let expected_inner = match unop {
3474 hir::UnNot | hir::UnNeg => {
// Only a deref propagates the caller's lvalue preference to its operand.
3481 let lvalue_pref = match unop {
3482 hir::UnDeref => lvalue_pref,
3485 let mut oprnd_t = self.check_expr_with_expectation_and_lvalue_pref(&oprnd,
3489 if !oprnd_t.references_error() {
3492 oprnd_t = self.structurally_resolved_type(expr.span, oprnd_t);
// Try the built-in deref first, then an overloaded `Deref`/`DerefMut` impl.
3494 if let Some(mt) = oprnd_t.builtin_deref(true, NoPreference) {
3496 } else if let Some(method) = self.try_overloaded_deref(
3497 expr.span, Some(&oprnd), oprnd_t, lvalue_pref) {
3498 oprnd_t = self.make_overloaded_lvalue_return_type(method).ty;
3499 self.tables.borrow_mut().method_map.insert(MethodCall::expr(expr.id),
// Neither worked: report and recover with the error type.
3502 self.type_error_message(expr.span, |actual| {
3503 format!("type `{}` cannot be \
3504 dereferenced", actual)
3506 oprnd_t = tcx.types.err;
// `!`: built-in for integers/bool, otherwise the `Not` lang-item trait.
3510 oprnd_t = self.structurally_resolved_type(oprnd.span,
3512 let result = self.check_user_unop("!", "not",
3513 tcx.lang_items.not_trait(),
3514 expr, &oprnd, oprnd_t, unop);
3515 // If it's builtin, we can reuse the type, this helps inference.
3516 if !(oprnd_t.is_integral() || oprnd_t.sty == ty::TyBool) {
// `-`: built-in for integers/floats, otherwise the `Neg` lang-item trait.
3521 oprnd_t = self.structurally_resolved_type(oprnd.span,
3523 let result = self.check_user_unop("-", "neg",
3524 tcx.lang_items.neg_trait(),
3525 expr, &oprnd, oprnd_t, unop);
3526 // If it's builtin, we can reuse the type, this helps inference.
3527 if !(oprnd_t.is_integral() || oprnd_t.is_fp()) {
// `&expr` / `&mut expr`: derive a hint for the operand from the expected
// pointer type, then wrap the operand type in a fresh region.
3535 hir::ExprAddrOf(mutbl, ref oprnd) => {
3536 let hint = expected.only_has_type(self).map_or(NoExpectation, |ty| {
3538 ty::TyRef(_, ref mt) | ty::TyRawPtr(ref mt) => {
3539 if self.tcx.expr_is_lval(&oprnd) {
3540 // Lvalues may legitimately have unsized types.
3541 // For example, dereferences of a fat pointer and
3542 // the last field of a struct can be unsized.
3543 ExpectHasType(mt.ty)
3545 Expectation::rvalue_hint(self, mt.ty)
3551 let lvalue_pref = LvaluePreference::from_mutbl(mutbl);
3552 let ty = self.check_expr_with_expectation_and_lvalue_pref(&oprnd, hint, lvalue_pref);
3554 let tm = ty::TypeAndMut { ty: ty, mutbl: mutbl };
3555 if tm.ty.references_error() {
3558 // Note: at this point, we cannot say what the best lifetime
3559 // is to use for resulting pointer. We want to use the
3560 // shortest lifetime possible so as to avoid spurious borrowck
3561 // errors. Moreover, the longest lifetime will depend on the
3562 // precise details of the value whose address is being taken
3563 // (and how long it is valid), which we don't know yet until type
3564 // inference is complete.
3566 // Therefore, here we simply generate a region variable. The
3567 // region inferencer will then select the ultimate value.
3568 // Finally, borrowck is charged with guaranteeing that the
3569 // value whose address was taken can actually be made to live
3570 // as long as it needs to live.
3571 let region = self.next_region_var(infer::AddrOfRegion(expr.span));
3572 tcx.mk_ref(region, tm)
// Path expression: instantiate the referenced item's type (or `err` on a
// failed resolution) and register WF bounds for its substitutions.
3575 hir::ExprPath(ref qpath) => {
3576 let (def, opt_ty, segments) = self.resolve_ty_and_def_ufcs(qpath,
3577 expr.id, expr.span);
3578 let ty = if def != Def::Err {
3579 self.instantiate_value_path(segments, opt_ty, def, expr.span, id)
3581 self.set_tainted_by_errors();
3585 // We always require that the type provided as the value for
3586 // a type parameter outlives the moment of instantiation.
3587 self.opt_node_ty_substs(expr.id, |item_substs| {
3588 self.add_wf_bounds(&item_substs.substs, expr);
// Inline assembly: just type-check the operand expressions.
3593 hir::ExprInlineAsm(_, ref outputs, ref inputs) => {
3594 for output in outputs {
3595 self.check_expr(output);
3597 for input in inputs {
3598 self.check_expr(input);
// `break [label] [expr]`: coerce the optional value into the enclosing
// loop's unified break type (LUB of all breaks seen so far).
3602 hir::ExprBreak(label, ref expr_opt) => {
3603 let loop_id = label.map(|l| l.loop_id);
3605 let mut enclosing_loops = self.enclosing_loops.borrow_mut();
3606 enclosing_loops.find_loop(loop_id).map(|ctxt| ctxt.coerce_to)
3608 if let Some(coerce_to) = coerce_to {
3611 if let Some(ref e) = *expr_opt {
3612 // Recurse without `enclosing_loops` borrowed.
3613 e_ty = self.check_expr_with_hint(e, coerce_to);
3614 cause = self.misc(e.span);
3615 // Notably, the recursive call may alter coerce_to - must not keep using it!
3617 // `break` without argument acts like `break ()`.
3618 e_ty = tcx.mk_nil();
3619 cause = self.misc(expr.span);
3621 let mut enclosing_loops = self.enclosing_loops.borrow_mut();
3622 let ctxt = enclosing_loops.find_loop(loop_id).unwrap();
3624 let result = if let Some(ref e) = *expr_opt {
3625 // Special-case the first element, as it has no "previous expressions".
3626 let result = if !ctxt.may_break {
3627 self.try_coerce(e, e_ty, ctxt.coerce_to)
3629 self.try_find_coercion_lub(&cause, || ctxt.break_exprs.iter().cloned(),
3630 ctxt.unified, e, e_ty)
3633 ctxt.break_exprs.push(e);
3636 self.eq_types(true, &cause, e_ty, ctxt.unified)
3637 .map(|InferOk { obligations, .. }| {
3638 // FIXME(#32730) propagate obligations
3639 assert!(obligations.is_empty());
3644 Ok(ty) => ctxt.unified = ty,
3646 self.report_mismatched_types(&cause, ctxt.unified, e_ty, err).emit();
3650 ctxt.may_break = true;
3652 // Otherwise, we failed to find the enclosing loop; this can only happen if the
3653 // `break` was not inside a loop at all, which is caught by the loop-checking pass.
3656 hir::ExprAgain(_) => { tcx.types.never }
// `return [expr]`: check against the declared return type; E0572 outside a
// body, E0069 for a bare `return;` in a non-`()` function.
3657 hir::ExprRet(ref expr_opt) => {
3658 if self.ret_ty.is_none() {
3659 struct_span_err!(self.tcx.sess, expr.span, E0572,
3660 "return statement outside of function body").emit();
3661 } else if let Some(ref e) = *expr_opt {
3662 self.check_expr_coercable_to_type(&e, self.ret_ty.unwrap());
3664 match self.eq_types(false,
3665 &self.misc(expr.span),
3666 self.ret_ty.unwrap(),
3668 Ok(ok) => self.register_infer_ok_obligations(ok),
3670 struct_span_err!(tcx.sess, expr.span, E0069,
3671 "`return;` in a function whose return type is not `()`")
3672 .span_label(expr.span, &format!("return type is not ()"))
// `lhs = rhs`: lhs checked with PreferMutLvalue; E0070 if not an lvalue.
3679 hir::ExprAssign(ref lhs, ref rhs) => {
3680 let lhs_ty = self.check_expr_with_lvalue_pref(&lhs, PreferMutLvalue);
3683 if !tcx.expr_is_lval(&lhs) {
3685 tcx.sess, expr.span, E0070,
3686 "invalid left-hand side expression")
3689 &format!("left-hand of expression not valid"))
3693 let rhs_ty = self.check_expr_coercable_to_type(&rhs, lhs_ty);
3695 self.require_type_is_sized(lhs_ty, lhs.span, traits::AssignmentLhsSized);
3697 if lhs_ty.references_error() || rhs_ty.references_error() {
3703 hir::ExprIf(ref cond, ref then_blk, ref opt_else_expr) => {
3704 self.check_then_else(&cond, &then_blk, opt_else_expr.as_ref().map(|e| &**e),
3705 expr.span, expected)
// `while`: loop context with `()` break type; the body's divergence is
// discarded because the body may never run.
3707 hir::ExprWhile(ref cond, ref body, _) => {
3708 let unified = self.tcx.mk_nil();
3709 let coerce_to = unified;
3710 let ctxt = LoopCtxt {
3712 coerce_to: coerce_to,
3713 break_exprs: vec![],
3716 self.with_loop_ctxt(expr.id, ctxt, || {
3717 self.check_expr_has_type(&cond, tcx.types.bool);
3718 let cond_diverging = self.diverges.get();
3719 self.check_block_no_value(&body);
3721 // We may never reach the body so it diverging means nothing.
3722 self.diverges.set(cond_diverging);
3725 if self.has_errors.get() {
// `loop`: break type starts as a fresh type variable (or the expectation);
// unified by `break` expressions.
3731 hir::ExprLoop(ref body, _, _) => {
3732 let unified = self.next_ty_var(TypeVariableOrigin::TypeInference(body.span));
3733 let coerce_to = expected.only_has_type(self).unwrap_or(unified);
3734 let ctxt = LoopCtxt {
3736 coerce_to: coerce_to,
3737 break_exprs: vec![],
3741 let ctxt = self.with_loop_ctxt(expr.id, ctxt, || {
3742 self.check_block_no_value(&body);
3745 // No way to know whether it's diverging because
3746 // of a `break` or an outer `break` or `return.
3747 self.diverges.set(Diverges::Maybe);
3754 hir::ExprMatch(ref discrim, ref arms, match_src) => {
3755 self.check_match(expr, &discrim, arms, expected, match_src)
3757 hir::ExprClosure(capture, ref decl, body_id, _) => {
3758 self.check_expr_closure(expr, capture, &decl, body_id, expected)
3760 hir::ExprBlock(ref b) => {
3761 self.check_block_with_expected(&b, expected)
3763 hir::ExprCall(ref callee, ref args) => {
3764 self.check_call(expr, &callee, args, expected)
3766 hir::ExprMethodCall(name, ref tps, ref args) => {
3767 self.check_method_call(expr, name, args, &tps[..], expected, lvalue_pref)
// `e as T`: the actual cast legality check is deferred until type-checking
// is otherwise complete (`deferred_cast_checks`).
3769 hir::ExprCast(ref e, ref t) => {
3770 // Find the type of `e`. Supply hints based on the type we are casting to,
3772 let t_cast = self.to_ty(t);
3773 let t_cast = self.resolve_type_vars_if_possible(&t_cast);
3774 let t_expr = self.check_expr_with_expectation(e, ExpectCastableToType(t_cast));
3775 let t_cast = self.resolve_type_vars_if_possible(&t_cast);
3777 // Eagerly check for some obvious errors.
3778 if t_expr.references_error() || t_cast.references_error() {
3781 // Defer other checks until we're done type checking.
3782 let mut deferred_cast_checks = self.deferred_cast_checks.borrow_mut();
3783 match cast::CastCheck::new(self, e, t_expr, t_cast, t.span, expr.span) {
3785 deferred_cast_checks.push(cast_check);
3788 Err(ErrorReported) => {
// Type-ascription `e: T`.
3794 hir::ExprType(ref e, ref t) => {
3795 let typ = self.to_ty(&t);
3796 self.check_expr_eq_type(&e, typ);
// `[a, b, c]`: unify all elements via coercion LUB, seeded from the
// expectation's element type when available.
3799 hir::ExprArray(ref args) => {
3800 let uty = expected.to_option(self).and_then(|uty| {
3802 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3807 let mut unified = self.next_ty_var(TypeVariableOrigin::TypeInference(expr.span));
3808 let coerce_to = uty.unwrap_or(unified);
3810 for (i, e) in args.iter().enumerate() {
3811 let e_ty = self.check_expr_with_hint(e, coerce_to);
3812 let cause = self.misc(e.span);
3814 // Special-case the first element, as it has no "previous expressions".
3815 let result = if i == 0 {
3816 self.try_coerce(e, e_ty, coerce_to)
3818 let prev_elems = || args[..i].iter().map(|e| &*e);
3819 self.try_find_coercion_lub(&cause, prev_elems, unified, e, e_ty)
3823 Ok(ty) => unified = ty,
3825 self.report_mismatched_types(&cause, unified, e_ty, e).emit();
3829 tcx.mk_array(unified, args.len())
// `[elem; count]`: evaluate the count, check the element, and (per the
// comment below) require `Copy` when the element is repeated.
3831 hir::ExprRepeat(ref element, count) => {
3832 let count = eval_length(self.tcx.global_tcx(), count, "repeat count")
3835 let uty = match expected {
3836 ExpectHasType(uty) => {
3838 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3845 let (element_ty, t) = match uty {
3847 self.check_expr_coercable_to_type(&element, uty);
3851 let t: Ty = self.next_ty_var(TypeVariableOrigin::MiscVariable(element.span));
3852 let element_ty = self.check_expr_has_type(&element, t);
3858 // For [foo, ..n] where n > 1, `foo` must have
3860 let lang_item = self.tcx.require_lang_item(lang_items::CopyTraitLangItem);
3861 self.require_type_meets(t, expr.span, traits::RepeatVec, lang_item);
3864 if element_ty.references_error() {
3867 tcx.mk_array(t, count)
// Tuple: check each element against the matching expected field, if any.
3870 hir::ExprTup(ref elts) => {
3871 let flds = expected.only_has_type(self).and_then(|ty| {
3873 ty::TyTuple(ref flds) => Some(&flds[..]),
3878 let elt_ts_iter = elts.iter().enumerate().map(|(i, e)| {
3879 let t = match flds {
3880 Some(ref fs) if i < fs.len() => {
3882 self.check_expr_coercable_to_type(&e, ety);
3886 self.check_expr_with_expectation(&e, NoExpectation)
3891 let tuple = tcx.mk_tup(elt_ts_iter);
3892 if tuple.references_error() {
3898 hir::ExprStruct(ref qpath, ref fields, ref base_expr) => {
3899 self.check_expr_struct(expr, qpath, fields, base_expr)
3901 hir::ExprField(ref base, ref field) => {
3902 self.check_field(expr, lvalue_pref, &base, field)
3904 hir::ExprTupField(ref base, idx) => {
3905 self.check_tup_field(expr, lvalue_pref, &base, idx)
// Indexing `base[idx]`: built-in or overloaded `Index`; on failure, try to
// suggest tuple-indexing syntax when the base is a tuple.
3907 hir::ExprIndex(ref base, ref idx) => {
3908 let base_t = self.check_expr_with_lvalue_pref(&base, lvalue_pref);
3909 let idx_t = self.check_expr(&idx);
3911 if base_t.references_error() {
3913 } else if idx_t.references_error() {
3916 let base_t = self.structurally_resolved_type(expr.span, base_t);
3917 match self.lookup_indexing(expr, base, base_t, idx_t, lvalue_pref) {
3918 Some((index_ty, element_ty)) => {
3919 self.demand_eqtype(expr.span, index_ty, idx_t);
3923 self.check_expr_has_type(&idx, self.tcx.types.err);
3924 let mut err = self.type_error_struct(
3927 format!("cannot index a value of type `{}`",
3931 // Try to give some advice about indexing tuples.
3932 if let ty::TyTuple(_) = base_t.sty {
3933 let mut needs_note = true;
3934 // If the index is an integer, we can show the actual
3935 // fixed expression:
3936 if let hir::ExprLit(ref lit) = idx.node {
3937 if let ast::LitKind::Int(i,
3938 ast::LitIntType::Unsuffixed) = lit.node {
3939 let snip = tcx.sess.codemap().span_to_snippet(base.span);
3940 if let Ok(snip) = snip {
3941 err.span_suggestion(expr.span,
3942 "to access tuple elements, \
3943 use tuple indexing syntax \
3945 format!("{}.{}", snip, i));
3951 err.help("to access tuple elements, use tuple indexing \
3952 syntax (e.g. `tuple.0`)");
3964 // Finish resolving a path in a struct expression or pattern `S::A { .. }` if necessary.
3965 // The newly resolved definition is written into `type_relative_path_defs`.
// Returns (presumably) the resolved `Def` together with the path's type:
// resolved paths convert directly via `def_to_ty`; type-relative paths
// resolve the segment against the self-type first.
3966 fn finish_resolving_struct_path(&self,
3969 node_id: ast::NodeId)
3973 hir::QPath::Resolved(ref maybe_qself, ref path) => {
3974 let opt_self_ty = maybe_qself.as_ref().map(|qself| self.to_ty(qself));
3975 let ty = AstConv::def_to_ty(self, self, opt_self_ty, path, node_id, true);
3978 hir::QPath::TypeRelative(ref qself, ref segment) => {
3979 let ty = self.to_ty(qself);
// If the self-type is itself a resolved path, reuse its `Def` as the
// base for associated-path resolution.
3981 let def = if let hir::TyPath(hir::QPath::Resolved(_, ref path)) = qself.node {
3986 let (ty, def) = AstConv::associated_path_def_to_ty(self, node_id, path_span,
3989 // Write back the new resolution.
3990 self.tables.borrow_mut().type_relative_path_defs.insert(node_id, def);
3997 // Resolve associated value path into a base type and associated constant or method definition.
3998 // The newly resolved definition is written into `type_relative_path_defs`.
// Returns the resolved `Def`, the base self-type (if any), and the path
// segments still to be processed by `instantiate_value_path`.
3999 pub fn resolve_ty_and_def_ufcs<'b>(&self,
4000 qpath: &'b hir::QPath,
4001 node_id: ast::NodeId,
4003 -> (Def, Option<Ty<'tcx>>, &'b [hir::PathSegment])
4005 let (ty, item_segment) = match *qpath {
4006 hir::QPath::Resolved(ref opt_qself, ref path) => {
4008 opt_qself.as_ref().map(|qself| self.to_ty(qself)),
4009 &path.segments[..]);
4011 hir::QPath::TypeRelative(ref qself, ref segment) => {
4012 (self.to_ty(qself), segment)
4015 let item_name = item_segment.name;
// UFCS lookup of the associated item; a private-item match still yields a
// usable `Def`, other errors are reported (unless the name is a recovery
// placeholder from the parser).
4016 let def = match self.resolve_ufcs(span, item_name, ty, node_id) {
4019 let def = match error {
4020 method::MethodError::PrivateMatch(def) => def,
4023 if item_name != keywords::Invalid.name() {
4024 self.report_method_error(span, ty, item_name, None, error, None);
4030 // Write back the new resolution.
4031 self.tables.borrow_mut().type_relative_path_defs.insert(node_id, def);
4032 (def, Some(ty), slice::ref_slice(&**item_segment))
// Type-check the initializer of a `let`. With a `ref`/`ref mut` binding in
// the pattern the initializer type must *equal* the local's type (no
// coercion — see the soundness comment below); otherwise a coercion to the
// local's type is allowed. Returns the initializer's type.
4035 pub fn check_decl_initializer(&self,
4036 local: &'gcx hir::Local,
4037 init: &'gcx hir::Expr) -> Ty<'tcx>
4039 let ref_bindings = local.pat.contains_ref_binding();
4041 let local_ty = self.local_ty(init.span, local.id);
4042 if let Some(m) = ref_bindings {
4043 // Somewhat subtle: if we have a `ref` binding in the pattern,
4044 // we want to avoid introducing coercions for the RHS. This is
4045 // both because it helps preserve sanity and, in the case of
4046 // ref mut, for soundness (issue #23116). In particular, in
4047 // the latter case, we need to be clear that the type of the
4048 // referent for the reference that results is *equal to* the
4049 // type of the lvalue it is referencing, and not some
4050 // supertype thereof.
4051 let init_ty = self.check_expr_with_lvalue_pref(init, LvaluePreference::from_mutbl(m));
4052 self.demand_eqtype(init.span, init_ty, local_ty);
// No ref binding: the usual coercion path.
4055 self.check_expr_coercable_to_type(init, local_ty)
// Type-check a whole `let` declaration: record the declared/inferred local
// type, check the initializer (if any), and check the pattern against the
// local type. On errors in either, overwrite the recorded type so the error
// propagates.
4059 pub fn check_decl_local(&self, local: &'gcx hir::Local) {
4060 let t = self.local_ty(local.span, local.id);
4061 self.write_ty(local.id, t);
4063 if let Some(ref init) = local.init {
4064 let init_ty = self.check_decl_initializer(local, &init);
4065 if init_ty.references_error() {
4066 self.write_ty(local.id, init_ty);
4070 self.check_pat(&local.pat, t);
4071 let pat_ty = self.node_ty(local.pat.id);
4072 if pat_ty.references_error() {
4073 self.write_ty(local.id, pat_ty);
// Type-check a single statement, mirroring the diverges/has_errors
// save-check-restore protocol used for expressions: the statement's recorded
// type is `err` on error, a fresh diverging variable if the statement always
// diverges, and `()` otherwise.
4077 pub fn check_stmt(&self, stmt: &'gcx hir::Stmt) {
4078 // Don't do all the complex logic below for DeclItem.
4080 hir::StmtDecl(ref decl, id) => {
4082 hir::DeclLocal(_) => {}
4083 hir::DeclItem(_) => {
4089 hir::StmtExpr(..) | hir::StmtSemi(..) => {}
4092 self.warn_if_unreachable(stmt.node.id(), stmt.span, "statement");
4094 // Hide the outer diverging and has_errors flags.
4095 let old_diverges = self.diverges.get();
4096 let old_has_errors = self.has_errors.get();
4097 self.diverges.set(Diverges::Maybe);
4098 self.has_errors.set(false);
4100 let (node_id, span) = match stmt.node {
4101 hir::StmtDecl(ref decl, id) => {
4102 let span = match decl.node {
4103 hir::DeclLocal(ref l) => {
4104 self.check_decl_local(&l);
4107 hir::DeclItem(_) => {/* ignore for now */
4113 hir::StmtExpr(ref expr, id) => {
4114 // Check with expected type of ()
4115 self.check_expr_has_type(&expr, self.tcx.mk_nil());
// Trailing-semicolon statement: any type is allowed, value discarded.
4118 hir::StmtSemi(ref expr, id) => {
4119 self.check_expr(&expr);
4124 if self.has_errors.get() {
4125 self.write_error(node_id);
4126 } else if self.diverges.get().always() {
4127 self.write_ty(node_id, self.next_diverging_ty_var(
4128 TypeVariableOrigin::DivergingStmt(span)));
4130 self.write_nil(node_id);
4133 // Combine the diverging and has_error flags.
4134 self.diverges.set(self.diverges.get() | old_diverges);
4135 self.has_errors.set(self.has_errors.get() | old_has_errors);
// Check a block in statement position: its value must be a subtype of `()`
// (which, via the expectation, also allows a diverging block).
4138 pub fn check_block_no_value(&self, blk: &'gcx hir::Block) {
4139 let unit = self.tcx.mk_nil();
4140 let ty = self.check_block_with_expected(blk, ExpectHasType(unit));
4141 self.demand_suptype(blk.span, unit, ty);
// Type-check a block: push an unsafety state, check every statement, then
// the optional tail expression against the expectation. Handles the
// diverging-block case, coercion of the tail expression, and the friendly
// "remove this semicolon" suggestion when a trailing `;` is the only thing
// making the block's type wrong.
4144 fn check_block_with_expected(&self,
4145 blk: &'gcx hir::Block,
4146 expected: Expectation<'tcx>) -> Ty<'tcx> {
// Swap in this block's unsafety context; restored at the end (`prev`).
4148 let mut fcx_ps = self.ps.borrow_mut();
4149 let unsafety_state = fcx_ps.recurse(blk);
4150 replace(&mut *fcx_ps, unsafety_state)
4153 for s in &blk.stmts {
4157 let mut ty = match blk.expr {
4158 Some(ref e) => self.check_expr_with_expectation(e, expected),
4159 None => self.tcx.mk_nil()
4162 if self.diverges.get().always() {
4163 if let ExpectHasType(ety) = expected {
4164 // Avoid forcing a type (only `!` for now) in unreachable code.
4165 // FIXME(aburka) do we need this special case? and should it be is_uninhabited?
4166 if !ety.is_never() {
4167 if let Some(ref e) = blk.expr {
4168 // Coerce the tail expression to the right type.
4169 self.demand_coerce(e, ty, ety);
// A diverging block gets a fresh diverging type variable as its type.
4174 ty = self.next_diverging_ty_var(TypeVariableOrigin::DivergingBlockExpr(blk.span));
4175 } else if let ExpectHasType(ety) = expected {
4176 if let Some(ref e) = blk.expr {
4177 // Coerce the tail expression to the right type.
4178 self.demand_coerce(e, ty, ety);
4180 // We're not diverging and there's an expected type, which,
4181 // in case it's not `()`, could result in an error higher-up.
4182 // We have a chance to error here early and be more helpful.
4183 let cause = self.misc(blk.span);
4184 let trace = TypeTrace::types(&cause, false, ty, ety);
4185 match self.sub_types(false, &cause, ty, ety) {
4186 Ok(InferOk { obligations, .. }) => {
4187 // FIXME(#32730) propagate obligations
4188 assert!(obligations.is_empty());
4191 let mut err = self.report_and_explain_type_error(trace, &err);
4193 // Be helpful when the user wrote `{... expr;}` and
4194 // taking the `;` off is enough to fix the error.
4195 let mut extra_semi = None;
4196 if let Some(stmt) = blk.stmts.last() {
4197 if let hir::StmtSemi(ref e, _) = stmt.node {
4198 if self.can_sub_types(self.node_ty(e.id), ety).is_ok() {
4199 extra_semi = Some(stmt);
4203 if let Some(last_stmt) = extra_semi {
// Point exactly at the final semicolon (one byte before the
// statement's end, in the macro-expansion-adjusted span).
4204 let original_span = original_sp(self.tcx.sess.codemap(),
4205 last_stmt.span, blk.span);
4206 let span_semi = Span {
4207 lo: original_span.hi - BytePos(1),
4208 hi: original_span.hi,
4209 expn_id: original_span.expn_id
4211 err.span_help(span_semi, "consider removing this semicolon:");
4219 // We already applied the type (and potentially errored),
4220 // use the expected type to avoid further errors out.
4224 if self.has_errors.get() || ty.references_error() {
4225 ty = self.tcx.types.err
4228 self.write_ty(blk.id, ty);
// Restore the enclosing unsafety state.
4230 *self.ps.borrow_mut() = prev;
4234 // Instantiates the given path, which must refer to an item with the given
4235 // number of type parameters and type.
4236 pub fn instantiate_value_path(&self,
4237 segments: &[hir::PathSegment],
4238 opt_self_ty: Option<Ty<'tcx>>,
4241 node_id: ast::NodeId)
4243 debug!("instantiate_value_path(path={:?}, def={:?}, node_id={})",
4248 // We need to extract the type parameters supplied by the user in
4249 // the path `path`. Due to the current setup, this is a bit of a
4250 // tricky-process; the problem is that resolve only tells us the
4251 // end-point of the path resolution, and not the intermediate steps.
4252 // Luckily, we can (at least for now) deduce the intermediate steps
4253 // just from the end-point.
4255 // There are basically four cases to consider:
4257 // 1. Reference to a constructor of enum variant or struct:
4259 // struct Foo<T>(...)
4260 // enum E<T> { Foo(...) }
4262 // In these cases, the parameters are declared in the type
4265 // 2. Reference to a fn item or a free constant:
4269 // In this case, the path will again always have the form
4270 // `a::b::foo::<T>` where only the final segment should have
4271 // type parameters. However, in this case, those parameters are
4272 // declared on a value, and hence are in the `FnSpace`.
4274 // 3. Reference to a method or an associated constant:
4276 // impl<A> SomeStruct<A> {
4280 // Here we can have a path like
4281 // `a::b::SomeStruct::<A>::foo::<B>`, in which case parameters
4282 // may appear in two places. The penultimate segment,
4283 // `SomeStruct::<A>`, contains parameters in TypeSpace, and the
4284 // final segment, `foo::<B>` contains parameters in fn space.
4286 // 4. Reference to a local variable
4288 // Local variables can't have any type parameters.
4290 // The first step then is to categorize the segments appropriately.
4292 assert!(!segments.is_empty());
4294 let mut ufcs_associated = None;
4295 let mut type_segment = None;
4296 let mut fn_segment = None;
4298 // Case 1. Reference to a struct/variant constructor.
4299 Def::StructCtor(def_id, ..) |
4300 Def::VariantCtor(def_id, ..) => {
4301 // Everything but the final segment should have no
4302 // parameters at all.
4303 let mut generics = self.tcx.item_generics(def_id);
4304 if let Some(def_id) = generics.parent {
4305 // Variant and struct constructors use the
4306 // generics of their parent type definition.
4307 generics = self.tcx.item_generics(def_id);
4309 type_segment = Some((segments.last().unwrap(), generics));
4312 // Case 2. Reference to a top-level value.
4314 Def::Const(def_id) |
4315 Def::Static(def_id, _) => {
4316 fn_segment = Some((segments.last().unwrap(),
4317 self.tcx.item_generics(def_id)));
4320 // Case 3. Reference to a method or associated const.
4321 Def::Method(def_id) |
4322 Def::AssociatedConst(def_id) => {
4323 let container = self.tcx.associated_item(def_id).container;
4325 ty::TraitContainer(trait_did) => {
4326 callee::check_legal_trait_for_method_call(self.ccx, span, trait_did)
4328 ty::ImplContainer(_) => {}
4331 let generics = self.tcx.item_generics(def_id);
4332 if segments.len() >= 2 {
4333 let parent_generics = self.tcx.item_generics(generics.parent.unwrap());
4334 type_segment = Some((&segments[segments.len() - 2], parent_generics));
4336 // `<T>::assoc` will end up here, and so can `T::assoc`.
4337 let self_ty = opt_self_ty.expect("UFCS sugared assoc missing Self");
4338 ufcs_associated = Some((container, self_ty));
4340 fn_segment = Some((segments.last().unwrap(), generics));
4343 // Case 4. Local variable, no generics.
4344 Def::Local(..) | Def::Upvar(..) => {}
4346 _ => bug!("unexpected definition: {:?}", def),
4349 debug!("type_segment={:?} fn_segment={:?}", type_segment, fn_segment);
4351 // Now that we have categorized what space the parameters for each
4352 // segment belong to, let's sort out the parameters that the user
4353 // provided (if any) into their appropriate spaces. We'll also report
4354 // errors if type parameters are provided in an inappropriate place.
// NOTE(review): interior excerpt of `instantiate_value_path`; several lines
// are not visible in this view, so the comments below describe only the
// code that is shown.
4355 let poly_segments = type_segment.is_some() as usize +
4356 fn_segment.is_some() as usize;
// Only the trailing type/fn segments may carry type parameters; reject any
// parameters written on the earlier segments of the path.
4357 self.tcx.prohibit_type_params(&segments[..segments.len() - poly_segments]);
// Locals and upvars are never generic: use the declared local type,
// normalize associated types in it, and record empty substitutions.
4360 Def::Local(def_id) | Def::Upvar(def_id, ..) => {
4361 let nid = self.tcx.hir.as_local_node_id(def_id).unwrap();
4362 let ty = self.local_ty(span, nid);
4363 let ty = self.normalize_associated_types_in(span, &ty);
4364 self.write_ty(node_id, ty);
4365 self.write_substs(node_id, ty::ItemSubsts {
4366 substs: self.tcx.intern_substs(&[])
4373 // Now we have to compare the types that the user *actually*
4374 // provided against the types that were *expected*. If the user
4375 // did not provide any types, then we want to substitute inference
4376 // variables. If the user provided some types, we may still need
4377 // to add defaults. If the user provided *too many* types, that's
// Report arity errors up front; these take `&mut` because on "too many
// type parameters" the segment is cleared so inference variables are
// used below instead of the erroneous explicit ones.
4379 self.check_path_parameter_count(span, &mut type_segment);
4380 self.check_path_parameter_count(span, &mut fn_segment);
// `fn_start` is the index (in the flat parameter numbering) where the
// fn segment's own generics begin; indices below it belong to the parent
// (type) segment's generics.
4382 let (fn_start, has_self) = match (type_segment, fn_segment) {
4383 (_, Some((_, generics))) => {
4384 (generics.parent_count(), generics.has_self)
4386 (Some((_, generics)), None) => {
4387 (generics.own_count(), generics.has_self)
4389 (None, None) => (0, false)
// Build the substitutions one parameter at a time; the first closure
// supplies region parameters, the second type parameters.
4391 let substs = Substs::for_item(self.tcx, def.def_id(), |def, _| {
4392 let mut i = def.index as usize;
4394 let segment = if i < fn_start {
// `Self` occupies slot 0 of the type segment but has no entry in the
// AST lifetime list, so shift the index to line up with the AST.
4395 i -= has_self as usize;
4401 let lifetimes = match segment.map(|(s, _)| &s.parameters) {
4402 Some(&hir::AngleBracketedParameters(ref data)) => &data.lifetimes[..],
4403 Some(&hir::ParenthesizedParameters(_)) => bug!(),
// Use the explicitly written lifetime when present; otherwise create a
// fresh region inference variable for this parameter.
4407 if let Some(ast_lifetime) = lifetimes.get(i) {
4408 ast_region_to_region(self.tcx, ast_lifetime)
4410 self.region_var_for_def(span, def)
4413 let mut i = def.index as usize;
4415 let segment = if i < fn_start {
4416 // Handle Self first, so we can adjust the index to match the AST.
4417 if has_self && i == 0 {
// Use the self type the user wrote (UFCS form) if any, else infer it.
4418 return opt_self_ty.unwrap_or_else(|| {
4419 self.type_var_for_def(span, def, substs)
4422 i -= has_self as usize;
4428 let (types, infer_types) = match segment.map(|(s, _)| &s.parameters) {
4429 Some(&hir::AngleBracketedParameters(ref data)) => {
4430 (&data.types[..], data.infer_types)
4432 Some(&hir::ParenthesizedParameters(_)) => bug!(),
4433 None => (&[][..], true)
4436 // Skip over the lifetimes in the same segment.
4437 if let Some((_, generics)) = segment {
4438 i -= generics.regions.len();
4441 if let Some(ast_ty) = types.get(i) {
4442 // A provided type parameter.
4444 } else if let (false, Some(default)) = (infer_types, def.default) {
4445 // No type parameter provided, but a default exists.
4446 default.subst_spanned(self.tcx, substs, Some(span))
4448 // No type parameters were provided, we can infer all.
4449 // This can also be reached in some error cases:
4450 // We prefer to use inference variables instead of
4451 // TyError to let type inference recover somewhat.
4452 self.type_var_for_def(span, def, substs)
4456 // The things we are substituting into the type should not contain
4457 // escaping late-bound regions, and nor should the base type scheme.
4458 let ty = self.tcx.item_type(def.def_id());
4459 assert!(!substs.has_escaping_regions());
4460 assert!(!ty.has_escaping_regions());
4462 // Add all the obligations that are required, substituting and
4463 // normalized appropriately.
4464 let bounds = self.instantiate_bounds(span, def.def_id(), &substs);
4465 self.add_obligations_for_parameters(
4466 traits::ObligationCause::new(span, self.body_id, traits::ItemObligation(def.def_id())),
4469 // Substitute the values for the type parameters into the type of
4470 // the referenced item.
4471 let ty_substituted = self.instantiate_type_scheme(span, &substs, &ty);
4473 if let Some((ty::ImplContainer(impl_def_id), self_ty)) = ufcs_associated {
4474 // In the case of `Foo<T>::method` and `<Foo<T>>::method`, if `method`
4475 // is inherent, there is no `Self` parameter, instead, the impl needs
4476 // type parameters, which we can infer by unifying the provided `Self`
4477 // with the substituted impl type.
4478 let ty = self.tcx.item_type(impl_def_id);
4480 let impl_ty = self.instantiate_type_scheme(span, &substs, &ty);
// Unify the written self type with the impl's substituted self type;
// a failure here indicates an internal inconsistency (see the bug!
// message below), since method resolution already accepted this pair.
4481 match self.sub_types(false, &self.misc(span), self_ty, impl_ty) {
4482 Ok(ok) => self.register_infer_ok_obligations(ok),
4485 "instantiate_value_path: (UFCS) {:?} was a subtype of {:?} but now is not?",
4492 debug!("instantiate_value_path: type of {:?} is {:?}",
// Record the final substitutions for this path expression's node.
4495 self.write_substs(node_id, ty::ItemSubsts {
4501 /// Report errors if the provided parameters are too few or too many.
/// On "too many type parameters" (E0087) the segment is cleared so the
/// caller (`instantiate_value_path`) falls back to inference variables
/// instead of the erroneous explicit parameters.
4502 fn check_path_parameter_count(&self,
4504 segment: &mut Option<(&hir::PathSegment, &ty::Generics)>) {
// Pull apart what the user actually wrote; a missing segment counts as
// "nothing provided, infer everything".
4505 let (lifetimes, types, infer_types, bindings) = {
4506 match segment.map(|(s, _)| &s.parameters) {
4507 Some(&hir::AngleBracketedParameters(ref data)) => {
4508 (&data.lifetimes[..], &data.types[..], data.infer_types, &data.bindings[..])
4510 Some(&hir::ParenthesizedParameters(_)) => {
// `Fn(..)` sugar is not valid in an expression path; reaching this
// arm is a compiler bug.
4511 span_bug!(span, "parenthesized parameters cannot appear in ExprPath");
4513 None => (&[][..], &[][..], true, &[][..])
// Helper producing "N parameter(s)" for the diagnostics below.
4518 format!("{} parameter{}", n, if n == 1 { "" } else { "s" })
4521 // Check provided lifetime parameters.
4522 let lifetime_defs = segment.map_or(&[][..], |(_, generics)| &generics.regions);
4523 if lifetimes.len() > lifetime_defs.len() {
4524 struct_span_err!(self.tcx.sess, span, E0088,
4525 "too many lifetime parameters provided: \
4526 expected {}, found {}",
4527 count(lifetime_defs.len()),
4528 count(lifetimes.len()))
4529 .span_label(span, &format!("unexpected lifetime parameter{}",
4530 match lifetimes.len() { 1 => "", _ => "s" }))
// E0090 fires only when *some* lifetimes were written but not all:
// writing none at all means "elide/infer them", which is fine.
4532 } else if lifetimes.len() > 0 && lifetimes.len() < lifetime_defs.len() {
4533 struct_span_err!(self.tcx.sess, span, E0090,
4534 "too few lifetime parameters provided: \
4535 expected {}, found {}",
4536 count(lifetime_defs.len()),
4537 count(lifetimes.len()))
4538 .span_label(span, &format!("too few lifetime parameters"))
4542 // The case where there is not enough lifetime parameters is not checked,
4543 // because this is not possible - a function never takes lifetime parameters.
4544 // See discussion for Pull Request 36208.
4546 // Check provided type parameters.
// For a type's own generics (no parent), skip the implicit `Self`
// parameter, which the user never writes explicitly.
4547 let type_defs = segment.map_or(&[][..], |(_, generics)| {
4548 if generics.parent.is_none() {
4549 &generics.types[generics.has_self as usize..]
// `required_len` counts the leading defaultless parameters; trailing
// defaulted parameters may be omitted.
4554 let required_len = type_defs.iter()
4555 .take_while(|d| d.default.is_none())
4557 if types.len() > type_defs.len() {
// Point at the first surplus type parameter rather than the whole path.
4558 let span = types[type_defs.len()].span;
4559 struct_span_err!(self.tcx.sess, span, E0087,
4560 "too many type parameters provided: \
4561 expected at most {}, found {}",
4562 count(type_defs.len()),
4564 .span_label(span, &format!("too many type parameters")).emit();
4566 // To prevent derived errors to accumulate due to extra
4567 // type parameters, we force instantiate_value_path to
4568 // use inference variables instead of the provided types.
// E0089 fires only when inference of the missing parameters is not
// permitted (`infer_types` is false, i.e. the user wrote an explicit,
// incomplete list).
4570 } else if !infer_types && types.len() < required_len {
4571 let adjust = |len| if len > 1 { "parameters" } else { "parameter" };
4572 let required_param_str = adjust(required_len);
4573 let actual_param_str = adjust(types.len());
4574 struct_span_err!(self.tcx.sess, span, E0089,
4575 "too few type parameters provided: \
4576 expected {} {}, found {} {}",
4577 count(required_len),
4581 .span_label(span, &format!("expected {} type {}", required_len, required_param_str))
// Associated-type bindings (`Item = T`) are only meaningful in type
// paths, never in expression paths.
4585 if !bindings.is_empty() {
4586 span_err!(self.tcx.sess, bindings[0].span, E0182,
4587 "unexpected binding of associated item in expression path \
4588 (only allowed in type paths)");
// Resolve `ty` as far as current inference allows; if it is still an
// unresolved type variable, fall back to the type produced by `f`, and if
// that is also unresolved (or erroneous), report an error and poison the
// variable with `err` to stop the error from cascading.
4592 fn structurally_resolve_type_or_else<F>(&self, sp: Span, ty: Ty<'tcx>, f: F)
4594 where F: Fn() -> Ty<'tcx>
4596 let mut ty = self.resolve_type_vars_with_obligations(ty);
// (Visible code below runs when `ty` is still an inference variable.)
// Ask the fallback closure for an alternative type.
4599 let alternative = f();
4602 if alternative.is_ty_var() || alternative.references_error() {
// Only emit "type must be known" if we have not already reported
// errors — otherwise this is likely a downstream symptom.
4603 if !self.is_tainted_by_errors() {
4604 self.type_error_message(sp, |_actual| {
4605 "the type of this value must be known in this context".to_string()
// Unify the variable with `err` so later uses see TyError rather
// than re-reporting the same ambiguity.
4608 self.demand_suptype(sp, self.tcx.types.err, ty);
4609 ty = self.tcx.types.err;
// Fallback produced a usable type: adopt it by unifying it with `ty`.
4611 self.demand_suptype(sp, alternative, ty);
4619 // Resolves `typ` by a single level if `typ` is a type variable. If no
4620 // resolution is possible, then an error is reported.
4621 pub fn structurally_resolved_type(&self, sp: Span, ty: Ty<'tcx>) -> Ty<'tcx> {
4622 self.structurally_resolve_type_or_else(sp, ty, || {
// NOTE(review): the fallback closure's body is not visible in this
// excerpt; presumably it returns `self.tcx.types.err` so the helper
// above takes its error path — confirm against the full file.
// Push `ctxt` onto the enclosing-loops stack (keyed by `id`), run `f` with
// it in scope, then pop and return the (possibly updated) context so the
// caller can inspect what `break`/`continue` recorded into it.
4627 fn with_loop_ctxt<F: FnOnce()>(&self, id: ast::NodeId, ctxt: LoopCtxt<'gcx, 'tcx>, f: F)
4628 -> LoopCtxt<'gcx, 'tcx> {
4631 let mut enclosing_loops = self.enclosing_loops.borrow_mut();
// Record where this loop's context lives on the stack.
4632 index = enclosing_loops.stack.len();
4633 enclosing_loops.by_id.insert(id, index);
4634 enclosing_loops.stack.push(ctxt);
// NOTE(review): the call to `f()` is not visible in this excerpt; the
// RefCell borrow above must be dropped (scoped in a block) before `f`
// runs, or re-borrowing inside `f` would panic — confirm in full file.
4638 let mut enclosing_loops = self.enclosing_loops.borrow_mut();
// This context must still be on top of the stack when we unwind it.
4639 debug_assert!(enclosing_loops.stack.len() == index + 1);
4640 enclosing_loops.by_id.remove(&id).expect("missing loop context");
4641 (enclosing_loops.stack.pop().expect("missing loop context"))
// Checks that every type parameter declared in `generics` is actually used
// somewhere within `ty`; emits E0091 for each unused one.
4646 pub fn check_bounds_are_used<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
4647 generics: &hir::Generics,
4649 debug!("check_bounds_are_used(n_tps={}, ty={:?})",
4650 generics.ty_params.len(), ty);
4652 // make a vector of booleans initially false, set to true when used
// Nothing to check when the item declares no type parameters.
4653 if generics.ty_params.is_empty() { return; }
4654 let mut tps_used = vec![false; generics.ty_params.len()];
// Walk every component type of `ty`, marking each parameter we see.
4656 for leaf_ty in ty.walk() {
4657 if let ty::TyParam(ParamTy {idx, ..}) = leaf_ty.sty {
4658 debug!("Found use of ty param num {}", idx);
// Parameter indices number lifetimes first, so subtract the lifetime
// count to index into the type-parameter array.
4659 tps_used[idx as usize - generics.lifetimes.len()] = true;
// Report each declared-but-unused type parameter at its own span.
4663 for (&used, param) in tps_used.iter().zip(&generics.ty_params) {
4665 struct_span_err!(ccx.tcx.sess, param.span, E0091,
4666 "type parameter `{}` is unused",
4668 .span_label(param.span, &format!("unused type parameter"))