1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
15 Within the check phase of type check, we check each item one at a time
16 (bodies of function expressions are checked as part of the containing
17 function). Inference is used to supply types wherever they are
20 By far the most complex case is checking the body of a function. This
21 can be broken down into several distinct phases:
23 - gather: creates type variables to represent the type of each local
24 variable and pattern binding.
26 - main: the main pass does the lion's share of the work: it
27 determines the types of all expressions, resolves
28 methods, checks for most invalid conditions, and so forth. In
29 some cases, where a type is unknown, it may create a type or region
30 variable and use that as the type of an expression.
32 In the process of checking, various constraints will be placed on
33 these type variables through the subtyping relationships requested
34 through the `demand` module. The `infer` module is in charge
35 of resolving those constraints.
37 - regionck: after main is complete, the regionck pass goes over all
38 types looking for regions and making sure that they did not escape
39 into places where they are not in scope. This may also influence the
40 final assignments of the various region variables if there is some
43 - vtable: finds and records the impls to use for each trait bound that
44 appears on a type parameter.
46 - writeback: writes the final types within a function body, replacing
47 type variables with their final inferred types. These final types
48 are written into the `tcx.node_types` table, which should *never* contain
49 any reference to a type variable.
53 While type checking a function, the intermediate types for the
54 expressions, blocks, and so forth contained within the function are
55 stored in `fcx.node_types` and `fcx.item_substs`. These types
56 may contain unresolved type variables. After type checking is
57 complete, the functions in the writeback module are used to take the
58 types from this table, resolve them, and then write them into their
59 permanent home in the type context `ccx.tcx`.
61 This means that during inferencing you should use `fcx.write_ty()`
62 and `fcx.expr_ty()` / `fcx.node_ty()` to write/obtain the types of
63 nodes within the function.
65 The types of top-level items, which never contain unbound type
66 variables, are stored directly into the `tcx` tables.
68 n.b.: A type variable is not the same thing as a type parameter. A
69 type variable is rather an "instance" of a type parameter: that is,
70 given a generic function `fn foo<T>(t: T)`: while checking the
71 function `foo`, the type `ty_param(0)` refers to the type `T`, which
72 is treated in abstract. When `foo()` is called, however, `T` will be
73 substituted for a fresh type variable `N`. This variable will
74 eventually be resolved to some concrete type (which might itself be
79 pub use self::Expectation::*;
80 pub use self::compare_method::{compare_impl_method, compare_const_impl};
81 use self::TupleArgumentsFlag::*;
84 use dep_graph::DepNode;
85 use fmt_macros::{Parser, Piece, Position};
86 use hir::def::{Def, CtorKind};
87 use hir::def_id::{DefId, LOCAL_CRATE};
88 use rustc::infer::{self, InferCtxt, InferOk, RegionVariableOrigin, TypeTrace};
89 use rustc::infer::type_variable::{self, TypeVariableOrigin};
90 use rustc::ty::subst::{Kind, Subst, Substs};
91 use rustc::traits::{self, ObligationCause, ObligationCauseCode, Reveal};
92 use rustc::ty::{ParamTy, ParameterEnvironment};
93 use rustc::ty::{LvaluePreference, NoPreference, PreferMutLvalue};
94 use rustc::ty::{self, ToPolyTraitRef, Ty, TyCtxt, Visibility};
95 use rustc::ty::{MethodCall, MethodCallee};
96 use rustc::ty::adjustment;
97 use rustc::ty::fold::{BottomUpFolder, TypeFoldable};
98 use rustc::ty::util::{Representability, IntTypeExt};
99 use require_c_abi_if_variadic;
100 use rscope::{ElisionFailureInfo, RegionScope};
101 use session::{Session, CompileResult};
105 use util::common::{ErrorReported, indenter};
106 use util::nodemap::{DefIdMap, FxHashMap, FxHashSet, NodeMap};
108 use std::cell::{Cell, RefCell};
110 use std::mem::replace;
111 use std::ops::{self, Deref};
112 use syntax::abi::Abi;
115 use syntax::codemap::{self, original_sp, Spanned};
116 use syntax::feature_gate::{GateIssue, emit_feature_err};
118 use syntax::symbol::{Symbol, InternedString, keywords};
119 use syntax::util::lev_distance::find_best_match_for_name;
120 use syntax_pos::{self, BytePos, Span, DUMMY_SP};
122 use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
123 use rustc::hir::itemlikevisit::ItemLikeVisitor;
124 use rustc::hir::{self, PatKind};
125 use rustc::middle::lang_items;
126 use rustc_back::slice;
127 use rustc_const_eval::eval_length;
147 /// closures defined within the function. For example:
150 /// bar(move|| { ... })
153 /// Here, the function `foo()` and the closure passed to
154 /// `bar()` will each have their own `FnCtxt`, but they will
155 /// share the inherited fields.
156 pub struct Inherited<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
157 ccx: &'a CrateCtxt<'a, 'gcx>,
158 infcx: InferCtxt<'a, 'gcx, 'tcx>,
159 locals: RefCell<NodeMap<Ty<'tcx>>>,
161 fulfillment_cx: RefCell<traits::FulfillmentContext<'tcx>>,
163 // When we process a call like `c()` where `c` is a closure type,
164 // we may not have decided yet whether `c` is a `Fn`, `FnMut`, or
165 // `FnOnce` closure. In that case, we defer full resolution of the
166 // call until upvar inference can kick in and make the
167 // decision. We keep these deferred resolutions grouped by the
168 // def-id of the closure, so that once we decide, we can easily go
169 // back and process them.
170 deferred_call_resolutions: RefCell<DefIdMap<Vec<DeferredCallResolutionHandler<'gcx, 'tcx>>>>,
172 deferred_cast_checks: RefCell<Vec<cast::CastCheck<'tcx>>>,
174 // Anonymized types found in explicit return types and their
175 // associated fresh inference variable. Writeback resolves these
176 // variables to get the concrete type, which can be used to
177 // deanonymize TyAnon, after typeck is done with all functions.
178 anon_types: RefCell<DefIdMap<Ty<'tcx>>>,
180 // Obligations which will have to be checked at the end of
181 // type-checking, after all functions have been inferred.
182 deferred_obligations: RefCell<Vec<traits::DeferredObligation<'tcx>>>,
185 impl<'a, 'gcx, 'tcx> Deref for Inherited<'a, 'gcx, 'tcx> {
186 type Target = InferCtxt<'a, 'gcx, 'tcx>;
187 fn deref(&self) -> &Self::Target {
// A deferred closure-call resolution: type checking of a call like `c()`
// (where `c` is a closure) is postponed until upvar inference has decided
// whether the closure is `Fn`, `FnMut`, or `FnOnce`.
192 trait DeferredCallResolution<'gcx, 'tcx> {
193 fn resolve<'a>(&mut self, fcx: &FnCtxt<'a, 'gcx, 'tcx>);
// Boxed handler; stored grouped by the closure's def-id (see the
// `deferred_call_resolutions` field on `Inherited`).
196 type DeferredCallResolutionHandler<'gcx, 'tcx> = Box<DeferredCallResolution<'gcx, 'tcx>+'tcx>;
198 /// When type-checking an expression, we propagate downward
199 /// whatever type hint we are able in the form of an `Expectation`.
200 #[derive(Copy, Clone, Debug)]
201 pub enum Expectation<'tcx> {
202 /// We know nothing about what type this expression should have.
205 /// This expression should have the type given (or some subtype)
206 ExpectHasType(Ty<'tcx>),
208 /// This expression will be cast to the `Ty`
209 ExpectCastableToType(Ty<'tcx>),
211 /// This rvalue expression will be wrapped in `&` or `Box` and coerced
212 /// to `&Ty` or `Box<Ty>`, respectively. `Ty` is `[A]` or `Trait`.
213 ExpectRvalueLikeUnsized(Ty<'tcx>),
216 impl<'a, 'gcx, 'tcx> Expectation<'tcx> {
217 // Disregard "castable to" expectations because they
218 // can lead us astray. Consider for example `if cond
219 // {22} else {c} as u8` -- if we propagate the
220 // "castable to u8" constraint to 22, it will pick the
221 // type 22u8, which is overly constrained (c might not
222 // be a u8). In effect, the problem is that the
223 // "castable to" expectation is not the tightest thing
224 // we can say, so we want to drop it in this case.
225 // The tightest thing we can say is "must unify with
226 // else branch". Note that in the case of a "has type"
227 // constraint, this limitation does not hold.
229 // If the expected type is just a type variable, then don't use
230 // an expected type. Otherwise, we might write parts of the type
231 // when checking the 'then' block which are incompatible with the
233 fn adjust_for_branches(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Expectation<'tcx> {
235 ExpectHasType(ety) => {
236 let ety = fcx.shallow_resolve(ety);
237 if !ety.is_ty_var() {
243 ExpectRvalueLikeUnsized(ety) => {
244 ExpectRvalueLikeUnsized(ety)
250 /// Provide an expectation for an rvalue expression given an *optional*
251 /// hint, which is not required for type safety (the resulting type might
252 /// be checked higher up, as is the case with `&expr` and `box expr`), but
253 /// is useful in determining the concrete type.
255 /// The primary use case is where the expected type is a fat pointer,
256 /// like `&[isize]`. For example, consider the following statement:
258 /// let x: &[isize] = &[1, 2, 3];
260 /// In this case, the expected type for the `&[1, 2, 3]` expression is
261 /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
262 /// expectation `ExpectHasType([isize])`, that would be too strong --
263 /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
264 /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
265 /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
266 /// which still is useful, because it informs integer literals and the like.
267 /// See the test case `test/run-pass/coerce-expect-unsized.rs` and #20169
268 /// for examples of where this comes up.
269 fn rvalue_hint(fcx: &FnCtxt<'a, 'gcx, 'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> {
270 match fcx.tcx.struct_tail(ty).sty {
271 ty::TySlice(_) | ty::TyStr | ty::TyDynamic(..) => {
272 ExpectRvalueLikeUnsized(ty)
274 _ => ExpectHasType(ty)
278 // Resolves `expected` by a single level if it is a variable. If
279 // there is no expected type or resolution is not possible (e.g.,
280 // no constraints yet present), just returns `None`.
281 fn resolve(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Expectation<'tcx> {
286 ExpectCastableToType(t) => {
287 ExpectCastableToType(fcx.resolve_type_vars_if_possible(&t))
289 ExpectHasType(t) => {
290 ExpectHasType(fcx.resolve_type_vars_if_possible(&t))
292 ExpectRvalueLikeUnsized(t) => {
293 ExpectRvalueLikeUnsized(fcx.resolve_type_vars_if_possible(&t))
298 fn to_option(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
299 match self.resolve(fcx) {
300 NoExpectation => None,
301 ExpectCastableToType(ty) |
303 ExpectRvalueLikeUnsized(ty) => Some(ty),
307 fn only_has_type(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
308 match self.resolve(fcx) {
309 ExpectHasType(ty) => Some(ty),
315 #[derive(Copy, Clone)]
// Tracks the unsafety context while checking a function body: where the
// current unsafety came from and how deeply push/pop-unsafe blocks nest.
316 pub struct UnsafetyState {
// NodeId of the fn or block that established the current unsafety state.
317 pub def: ast::NodeId,
318 pub unsafety: hir::Unsafety,
// Nesting depth of `PushUnsafeBlock`/`PopUnsafeBlock` blocks; adjusted
// with checked add/sub in `recurse` below, so under/overflow is a bug.
319 pub unsafe_push_count: u32,
324 pub fn function(unsafety: hir::Unsafety, def: ast::NodeId) -> UnsafetyState {
325 UnsafetyState { def: def, unsafety: unsafety, unsafe_push_count: 0, from_fn: true }
328 pub fn recurse(&mut self, blk: &hir::Block) -> UnsafetyState {
329 match self.unsafety {
330 // If this unsafe, then if the outer function was already marked as
331 // unsafe we shouldn't attribute the unsafe'ness to the block. This
332 // way the block can be warned about instead of ignoring this
333 // extraneous block (functions are never warned about).
334 hir::Unsafety::Unsafe if self.from_fn => *self,
337 let (unsafety, def, count) = match blk.rules {
338 hir::PushUnsafeBlock(..) =>
339 (unsafety, blk.id, self.unsafe_push_count.checked_add(1).unwrap()),
340 hir::PopUnsafeBlock(..) =>
341 (unsafety, blk.id, self.unsafe_push_count.checked_sub(1).unwrap()),
342 hir::UnsafeBlock(..) =>
343 (hir::Unsafety::Unsafe, blk.id, self.unsafe_push_count),
345 (unsafety, self.def, self.unsafe_push_count),
347 UnsafetyState{ def: def,
349 unsafe_push_count: count,
356 /// Whether a node ever exits normally or not.
357 /// Tracked semi-automatically (through type variables
358 /// marked as diverging), with some manual adjustments
359 /// for control-flow primitives (approximating a CFG).
360 #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
362 /// Potentially unknown, some cases converge,
363 /// others require a CFG to determine them.
366 /// Definitely known to diverge and therefore
367 /// not reach the next sibling or its parent.
370 /// Same as `Always` but with a reachability
371 /// warning already emitted
375 // Convenience impls for combining `Diverges`.
// `a & b` — "both diverge": yields the *less* diverging of the two
// (min under the derived `Ord`; variant order is Maybe < Always < WarnedAlways).
377 impl ops::BitAnd for Diverges {
379 fn bitand(self, other: Self) -> Self {
380 cmp::min(self, other)
// `a | b` — "either diverges": yields the *more* diverging of the two (max).
384 impl ops::BitOr for Diverges {
386 fn bitor(self, other: Self) -> Self {
387 cmp::max(self, other)
// Compound-assignment forms, defined in terms of the operators above.
391 impl ops::BitAndAssign for Diverges {
392 fn bitand_assign(&mut self, other: Self) {
393 *self = *self & other;
397 impl ops::BitOrAssign for Diverges {
398 fn bitor_assign(&mut self, other: Self) {
399 *self = *self | other;
// True for `Always` and `WarnedAlways`; relies on the derived variant ordering.
404 fn always(self) -> bool {
405 self >= Diverges::Always
// Per-loop state accumulated while checking a loop body.
410 pub struct LoopCtxt<'gcx, 'tcx> {
// `break` expressions targeting this loop, collected during checking.
413 break_exprs: Vec<&'gcx hir::Expr>,
// Stack of loops currently being type-checked (innermost last).
418 pub struct EnclosingLoops<'gcx, 'tcx> {
419 stack: Vec<LoopCtxt<'gcx, 'tcx>>,
// Maps a loop's NodeId to its index in `stack` — presumably used to
// resolve labeled break/continue targets; TODO confirm against callers.
420 by_id: NodeMap<usize>,
423 impl<'gcx, 'tcx> EnclosingLoops<'gcx, 'tcx> {
// With `Some(id)`, look the loop up by id in `by_id`; with `None`,
// fall back to the innermost enclosing loop, if any.
424 fn find_loop(&mut self, id: Option<ast::NodeId>) -> Option<&mut LoopCtxt<'gcx, 'tcx>> {
425 if let Some(id) = id {
426 if let Some(ix) = self.by_id.get(&id).cloned() {
427 Some(&mut self.stack[ix])
432 self.stack.last_mut()
438 pub struct FnCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
439 ast_ty_to_ty_cache: RefCell<NodeMap<Ty<'tcx>>>,
441 body_id: ast::NodeId,
443 // This flag is set to true if, during the writeback phase, we encounter
444 // a type error in this function.
445 writeback_errors: Cell<bool>,
447 // Number of errors that had been reported when we started
448 // checking this function. On exit, if we find that *more* errors
449 // have been reported, we will skip regionck and other work that
450 // expects the types within the function to be consistent.
451 err_count_on_creation: usize,
453 ret_ty: Option<Ty<'tcx>>,
455 ps: RefCell<UnsafetyState>,
457 /// Whether the last checked node can ever exit.
458 diverges: Cell<Diverges>,
460 /// Whether any child nodes have any type errors.
461 has_errors: Cell<bool>,
463 enclosing_loops: RefCell<EnclosingLoops<'gcx, 'tcx>>,
465 inh: &'a Inherited<'a, 'gcx, 'tcx>,
468 impl<'a, 'gcx, 'tcx> Deref for FnCtxt<'a, 'gcx, 'tcx> {
469 type Target = Inherited<'a, 'gcx, 'tcx>;
470 fn deref(&self) -> &Self::Target {
475 /// Helper type of a temporary returned by ccx.inherited(...).
476 /// Necessary because we can't write the following bound:
477 /// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(Inherited<'b, 'gcx, 'tcx>).
478 pub struct InheritedBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
479 ccx: &'a CrateCtxt<'a, 'gcx>,
480 infcx: infer::InferCtxtBuilder<'a, 'gcx, 'tcx>
483 impl<'a, 'gcx, 'tcx> CrateCtxt<'a, 'gcx> {
484 pub fn inherited(&'a self, id: ast::NodeId)
485 -> InheritedBuilder<'a, 'gcx, 'tcx> {
486 let tables = ty::TypeckTables::empty();
487 let param_env = ParameterEnvironment::for_item(self.tcx, id);
490 infcx: self.tcx.infer_ctxt((tables, param_env), Reveal::NotSpecializable)
495 impl<'a, 'gcx, 'tcx> InheritedBuilder<'a, 'gcx, 'tcx> {
496 fn enter<F, R>(&'tcx mut self, f: F) -> R
497 where F: for<'b> FnOnce(Inherited<'b, 'gcx, 'tcx>) -> R
500 self.infcx.enter(|infcx| f(Inherited::new(ccx, infcx)))
504 impl<'a, 'gcx, 'tcx> Inherited<'a, 'gcx, 'tcx> {
505 pub fn new(ccx: &'a CrateCtxt<'a, 'gcx>,
506 infcx: InferCtxt<'a, 'gcx, 'tcx>)
511 fulfillment_cx: RefCell::new(traits::FulfillmentContext::new()),
512 locals: RefCell::new(NodeMap()),
513 deferred_call_resolutions: RefCell::new(DefIdMap()),
514 deferred_cast_checks: RefCell::new(Vec::new()),
515 anon_types: RefCell::new(DefIdMap()),
516 deferred_obligations: RefCell::new(Vec::new()),
520 fn normalize_associated_types_in<T>(&self,
522 body_id: ast::NodeId,
525 where T : TypeFoldable<'tcx>
527 assoc::normalize_associated_types_in(self,
528 &mut self.fulfillment_cx.borrow_mut(),
536 struct CheckItemTypesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
537 struct CheckItemBodiesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
539 impl<'a, 'tcx> Visitor<'tcx> for CheckItemTypesVisitor<'a, 'tcx> {
540 fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
541 NestedVisitorMap::OnlyBodies(&self.ccx.tcx.hir)
544 fn visit_item(&mut self, i: &'tcx hir::Item) {
545 check_item_type(self.ccx, i);
546 intravisit::walk_item(self, i);
549 fn visit_ty(&mut self, t: &'tcx hir::Ty) {
551 hir::TyArray(_, length) => {
552 check_const_with_type(self.ccx, length, self.ccx.tcx.types.usize, length.node_id);
557 intravisit::walk_ty(self, t);
560 fn visit_expr(&mut self, e: &'tcx hir::Expr) {
562 hir::ExprRepeat(_, count) => {
563 check_const_with_type(self.ccx, count, self.ccx.tcx.types.usize, count.node_id);
568 intravisit::walk_expr(self, e);
572 impl<'a, 'tcx> ItemLikeVisitor<'tcx> for CheckItemBodiesVisitor<'a, 'tcx> {
573 fn visit_item(&mut self, item: &'tcx hir::Item) {
575 hir::ItemFn(ref decl, .., body_id) => {
576 check_bare_fn(self.ccx, &decl, body_id, item.id, item.span);
582 fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) {
583 match trait_item.node {
584 hir::TraitItemKind::Const(_, Some(expr)) => {
585 check_const(self.ccx, expr, trait_item.id)
587 hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Provided(body_id)) => {
588 check_bare_fn(self.ccx, &sig.decl, body_id, trait_item.id, trait_item.span);
590 hir::TraitItemKind::Method(_, hir::TraitMethod::Required(_)) |
591 hir::TraitItemKind::Const(_, None) |
592 hir::TraitItemKind::Type(..) => {
598 fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem) {
599 match impl_item.node {
600 hir::ImplItemKind::Const(_, expr) => {
601 check_const(self.ccx, expr, impl_item.id)
603 hir::ImplItemKind::Method(ref sig, body_id) => {
604 check_bare_fn(self.ccx, &sig.decl, body_id, impl_item.id, impl_item.span);
606 hir::ImplItemKind::Type(_) => {
607 // Nothing to do here.
613 pub fn check_wf_new(ccx: &CrateCtxt) -> CompileResult {
614 ccx.tcx.sess.track_errors(|| {
615 let mut visit = wfcheck::CheckTypeWellFormedVisitor::new(ccx);
616 ccx.tcx.visit_all_item_likes_in_krate(DepNode::WfCheck, &mut visit.as_deep_visitor());
620 pub fn check_item_types(ccx: &CrateCtxt) -> CompileResult {
621 ccx.tcx.sess.track_errors(|| {
622 let mut visit = CheckItemTypesVisitor { ccx: ccx };
623 ccx.tcx.visit_all_item_likes_in_krate(DepNode::TypeckItemType,
624 &mut visit.as_deep_visitor());
628 pub fn check_item_bodies(ccx: &CrateCtxt) -> CompileResult {
629 ccx.tcx.sess.track_errors(|| {
630 let mut visit = CheckItemBodiesVisitor { ccx: ccx };
631 ccx.tcx.visit_all_item_likes_in_krate(DepNode::TypeckTables, &mut visit);
633 // Process deferred obligations, now that all functions
634 // bodies have been fully inferred.
635 for (&item_id, obligations) in ccx.deferred_obligations.borrow().iter() {
636 // Use the same DepNode as for the body of the original function/item.
637 let def_id = ccx.tcx.hir.local_def_id(item_id);
638 let _task = ccx.tcx.dep_graph.in_task(DepNode::TypeckTables(def_id));
640 let param_env = ParameterEnvironment::for_item(ccx.tcx, item_id);
641 ccx.tcx.infer_ctxt(param_env, Reveal::NotSpecializable).enter(|infcx| {
642 let mut fulfillment_cx = traits::FulfillmentContext::new();
643 for obligation in obligations.iter().map(|o| o.to_obligation()) {
644 fulfillment_cx.register_predicate_obligation(&infcx, obligation);
647 if let Err(errors) = fulfillment_cx.select_all_or_error(&infcx) {
648 infcx.report_fulfillment_errors(&errors);
655 pub fn check_drop_impls(ccx: &CrateCtxt) -> CompileResult {
656 ccx.tcx.sess.track_errors(|| {
657 let _task = ccx.tcx.dep_graph.in_task(DepNode::Dropck);
658 let drop_trait = match ccx.tcx.lang_items.drop_trait() {
659 Some(id) => ccx.tcx.lookup_trait_def(id), None => { return }
661 drop_trait.for_each_impl(ccx.tcx, |drop_impl_did| {
662 let _task = ccx.tcx.dep_graph.in_task(DepNode::DropckImpl(drop_impl_did));
663 if drop_impl_did.is_local() {
664 match dropck::check_drop_impl(ccx, drop_impl_did) {
667 assert!(ccx.tcx.sess.has_errors());
675 fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
676 decl: &'tcx hir::FnDecl,
677 body_id: hir::BodyId,
680 let body = ccx.tcx.hir.body(body_id);
682 let raw_fty = ccx.tcx.item_type(ccx.tcx.hir.local_def_id(fn_id));
683 let fn_ty = match raw_fty.sty {
684 ty::TyFnDef(.., f) => f,
685 _ => span_bug!(body.value.span, "check_bare_fn: function type expected")
688 check_abi(ccx, span, fn_ty.abi);
690 ccx.inherited(fn_id).enter(|inh| {
691 // Compute the fty from point of view of inside fn.
692 let fn_scope = inh.tcx.region_maps.call_site_extent(fn_id, body_id.node_id);
694 fn_ty.sig.subst(inh.tcx, &inh.parameter_environment.free_substs);
696 inh.tcx.liberate_late_bound_regions(fn_scope, &fn_sig);
698 inh.normalize_associated_types_in(body.value.span, body_id.node_id, &fn_sig);
700 let fcx = check_fn(&inh, fn_ty.unsafety, fn_id, &fn_sig, decl, fn_id, body);
702 fcx.select_all_obligations_and_apply_defaults();
703 fcx.closure_analyze(body);
704 fcx.select_obligations_where_possible();
706 fcx.select_all_obligations_or_error(); // Casts can introduce new obligations.
708 fcx.regionck_fn(fn_id, body);
709 fcx.resolve_type_vars_in_body(body);
// Reject fn ABIs that the current compilation target does not support
// (emits error E0570).
713 fn check_abi<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, span: Span, abi: Abi) {
714 if !ccx.tcx.sess.target.target.is_abi_supported(abi) {
715 struct_span_err!(ccx.tcx.sess, span, E0570,
716 "The ABI `{}` is not supported for the current target", abi).emit()
// The "gather" pass: walks a function body and records a type for every
// local variable and pattern binding in `fcx.locals`.
720 struct GatherLocalsVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
721 fcx: &'a FnCtxt<'a, 'gcx, 'tcx>
724 impl<'a, 'gcx, 'tcx> GatherLocalsVisitor<'a, 'gcx, 'tcx> {
// Record the type of local/binding `nid`: a fresh inference variable when
// the user wrote no type (`ty_opt` is `None`), otherwise the given type.
// Returns the type that was recorded.
725 fn assign(&mut self, span: Span, nid: ast::NodeId, ty_opt: Option<Ty<'tcx>>) -> Ty<'tcx> {
728 // infer the variable's type
729 let var_ty = self.fcx.next_ty_var(TypeVariableOrigin::TypeInference(span));
730 self.fcx.locals.borrow_mut().insert(nid, var_ty);
734 // take type that the user specified
735 self.fcx.locals.borrow_mut().insert(nid, typ);
742 impl<'a, 'gcx, 'tcx> Visitor<'gcx> for GatherLocalsVisitor<'a, 'gcx, 'tcx> {
743 fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'gcx> {
744 NestedVisitorMap::None
747 // Add explicitly-declared locals.
748 fn visit_local(&mut self, local: &'gcx hir::Local) {
749 let o_ty = match local.ty {
750 Some(ref ty) => Some(self.fcx.to_ty(&ty)),
753 self.assign(local.span, local.id, o_ty);
754 debug!("Local variable {:?} is assigned type {}",
756 self.fcx.ty_to_string(
757 self.fcx.locals.borrow().get(&local.id).unwrap().clone()));
758 intravisit::walk_local(self, local);
761 // Add pattern bindings.
762 fn visit_pat(&mut self, p: &'gcx hir::Pat) {
763 if let PatKind::Binding(_, _, ref path1, _) = p.node {
764 let var_ty = self.assign(p.span, p.id, None);
766 self.fcx.require_type_is_sized(var_ty, p.span,
767 traits::VariableType(p.id));
769 debug!("Pattern binding {} is assigned to {} with type {:?}",
771 self.fcx.ty_to_string(
772 self.fcx.locals.borrow().get(&p.id).unwrap().clone()),
775 intravisit::walk_pat(self, p);
778 // Don't descend into the bodies of nested closures
779 fn visit_fn(&mut self, _: intravisit::FnKind<'gcx>, _: &'gcx hir::FnDecl,
780 _: hir::BodyId, _: Span, _: ast::NodeId) { }
783 /// Helper used by check_bare_fn and check_expr_fn. Does the grungy work of checking a function
784 /// body and returns the function context used for that purpose, since in the case of a fn item
785 /// there is still a bit more to do.
788 /// * inherited: other fields inherited from the enclosing fn (if any)
789 fn check_fn<'a, 'gcx, 'tcx>(inherited: &'a Inherited<'a, 'gcx, 'tcx>,
790 unsafety: hir::Unsafety,
791 unsafety_id: ast::NodeId,
792 fn_sig: &ty::FnSig<'tcx>,
793 decl: &'gcx hir::FnDecl,
795 body: &'gcx hir::Body)
796 -> FnCtxt<'a, 'gcx, 'tcx>
798 let mut fn_sig = fn_sig.clone();
800 debug!("check_fn(sig={:?}, fn_id={})", fn_sig, fn_id);
802 // Create the function context. This is either derived from scratch or,
803 // in the case of function expressions, based on the outer context.
804 let mut fcx = FnCtxt::new(inherited, None, body.value.id);
805 let ret_ty = fn_sig.output();
806 *fcx.ps.borrow_mut() = UnsafetyState::function(unsafety, unsafety_id);
808 fcx.require_type_is_sized(ret_ty, decl.output.span(), traits::ReturnType);
809 fcx.ret_ty = fcx.instantiate_anon_types(&Some(ret_ty));
810 fn_sig = fcx.tcx.mk_fn_sig(fn_sig.inputs().iter().cloned(), &fcx.ret_ty.unwrap(),
813 GatherLocalsVisitor { fcx: &fcx, }.visit_body(body);
815 // Add formal parameters.
816 for (arg_ty, arg) in fn_sig.inputs().iter().zip(&body.arguments) {
817 // The type of the argument must be well-formed.
819 // NB -- this is now checked in wfcheck, but that
820 // currently only results in warnings, so we issue an
821 // old-style WF obligation here so that we still get the
822 // errors that we used to get.
823 fcx.register_old_wf_obligation(arg_ty, arg.pat.span, traits::MiscObligation);
825 // Check the pattern.
826 fcx.check_pat_arg(&arg.pat, arg_ty, true);
827 fcx.write_ty(arg.id, arg_ty);
830 inherited.tables.borrow_mut().liberated_fn_sigs.insert(fn_id, fn_sig);
832 fcx.check_expr_coercable_to_type(&body.value, fcx.ret_ty.unwrap());
// Item-level checks for a struct definition: representability, plus SIMD
// constraints when the type is marked as SIMD.
837 fn check_struct(ccx: &CrateCtxt, id: ast::NodeId, span: Span) {
838 let def_id = ccx.tcx.hir.local_def_id(id);
839 check_representable(ccx.tcx, span, def_id);
// SIMD types get additional checks via `check_simd`.
841 if ccx.tcx.lookup_simd(def_id) {
842 check_simd(ccx.tcx, span, def_id);
// Unions only need the representability check.
846 fn check_union(ccx: &CrateCtxt, id: ast::NodeId, span: Span) {
847 check_representable(ccx.tcx, span, ccx.tcx.hir.local_def_id(id));
850 pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
851 debug!("check_item_type(it.id={}, it.name={})",
853 ccx.tcx.item_path_str(ccx.tcx.hir.local_def_id(it.id)));
854 let _indenter = indenter();
856 // Consts can play a role in type-checking, so they are included here.
857 hir::ItemStatic(.., e) |
858 hir::ItemConst(_, e) => check_const(ccx, e, it.id),
859 hir::ItemEnum(ref enum_definition, _) => {
860 check_enum_variants(ccx,
862 &enum_definition.variants,
865 hir::ItemFn(..) => {} // entirely within check_item_body
866 hir::ItemImpl(.., ref impl_item_refs) => {
867 debug!("ItemImpl {} with id {}", it.name, it.id);
868 let impl_def_id = ccx.tcx.hir.local_def_id(it.id);
869 if let Some(impl_trait_ref) = ccx.tcx.impl_trait_ref(impl_def_id) {
870 check_impl_items_against_trait(ccx,
875 let trait_def_id = impl_trait_ref.def_id;
876 check_on_unimplemented(ccx, trait_def_id, it);
879 hir::ItemTrait(..) => {
880 let def_id = ccx.tcx.hir.local_def_id(it.id);
881 check_on_unimplemented(ccx, def_id, it);
883 hir::ItemStruct(..) => {
884 check_struct(ccx, it.id, it.span);
886 hir::ItemUnion(..) => {
887 check_union(ccx, it.id, it.span);
889 hir::ItemTy(_, ref generics) => {
890 let def_id = ccx.tcx.hir.local_def_id(it.id);
891 let pty_ty = ccx.tcx.item_type(def_id);
892 check_bounds_are_used(ccx, generics, pty_ty);
894 hir::ItemForeignMod(ref m) => {
895 check_abi(ccx, it.span, m.abi);
897 if m.abi == Abi::RustIntrinsic {
898 for item in &m.items {
899 intrinsic::check_intrinsic_type(ccx, item);
901 } else if m.abi == Abi::PlatformIntrinsic {
902 for item in &m.items {
903 intrinsic::check_platform_intrinsic_type(ccx, item);
906 for item in &m.items {
907 let generics = ccx.tcx.item_generics(ccx.tcx.hir.local_def_id(item.id));
908 if !generics.types.is_empty() {
909 let mut err = struct_span_err!(ccx.tcx.sess, item.span, E0044,
910 "foreign items may not have type parameters");
911 span_help!(&mut err, item.span,
912 "consider using specialization instead of \
917 if let hir::ForeignItemFn(ref fn_decl, _, _) = item.node {
918 require_c_abi_if_variadic(ccx.tcx, fn_decl, m.abi, item.span);
923 _ => {/* nothing to do */ }
927 fn check_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
930 let generics = ccx.tcx.item_generics(def_id);
931 if let Some(ref attr) = item.attrs.iter().find(|a| {
932 a.check_name("rustc_on_unimplemented")
934 if let Some(istring) = attr.value_str() {
935 let istring = istring.as_str();
936 let parser = Parser::new(&istring);
937 let types = &generics.types;
938 for token in parser {
940 Piece::String(_) => (), // Normal string, no need to check it
941 Piece::NextArgument(a) => match a.position {
942 // `{Self}` is allowed
943 Position::ArgumentNamed(s) if s == "Self" => (),
944 // So is `{A}` if A is a type parameter
945 Position::ArgumentNamed(s) => match types.iter().find(|t| {
950 let name = ccx.tcx.item_name(def_id);
951 span_err!(ccx.tcx.sess, attr.span, E0230,
952 "there is no type parameter \
957 // `{:1}` and `{}` are not to be used
958 Position::ArgumentIs(_) => {
959 span_err!(ccx.tcx.sess, attr.span, E0231,
960 "only named substitution \
961 parameters are allowed");
968 ccx.tcx.sess, attr.span, E0232,
969 "this attribute must have a value")
970 .span_label(attr.span, &format!("attribute requires a value"))
971 .note(&format!("eg `#[rustc_on_unimplemented = \"foo\"]`"))
977 fn report_forbidden_specialization<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
978 impl_item: &hir::ImplItem,
981 let mut err = struct_span_err!(
982 tcx.sess, impl_item.span, E0520,
983 "`{}` specializes an item from a parent `impl`, but \
984 that item is not marked `default`",
986 err.span_label(impl_item.span, &format!("cannot specialize default item `{}`",
989 match tcx.span_of_impl(parent_impl) {
991 err.span_label(span, &"parent `impl` is here");
992 err.note(&format!("to specialize, `{}` in the parent `impl` must be marked `default`",
996 err.note(&format!("parent implementation is in crate `{}`", cname));
1003 fn check_specialization_validity<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
1004 trait_def: &ty::TraitDef,
1006 impl_item: &hir::ImplItem)
1008 let ancestors = trait_def.ancestors(impl_id);
1010 let kind = match impl_item.node {
1011 hir::ImplItemKind::Const(..) => ty::AssociatedKind::Const,
1012 hir::ImplItemKind::Method(..) => ty::AssociatedKind::Method,
1013 hir::ImplItemKind::Type(_) => ty::AssociatedKind::Type
1015 let parent = ancestors.defs(tcx, impl_item.name, kind).skip(1).next()
1016 .map(|node_item| node_item.map(|parent| parent.defaultness));
1018 if let Some(parent) = parent {
1019 if parent.item.is_final() {
1020 report_forbidden_specialization(tcx, impl_item, parent.node.def_id());
// Checks every item of a trait impl against the trait definition:
// (1) each impl item must match the kind (const/method/type) of the trait
//     item it names (E0323/E0324/E0325 otherwise);
// (2) no final parent item may be specialized (delegated to
//     check_specialization_validity);
// (3) all non-defaulted trait items must be implemented (E0046);
// (4) overriding an associated type invalidates defaulted items that may
//     depend on it (E0399).
// NOTE(review): many original lines are elided in this listing (closing
// braces, some arguments); the visible text is kept byte-identical.
1026 fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
1029 impl_trait_ref: ty::TraitRef<'tcx>,
1030 impl_item_refs: &[hir::ImplItemRef]) {
1031 // If the trait reference itself is erroneous (so the compilation is going
1032 // to fail), skip checking the items here -- the `impl_item` table in `tcx`
1033 // isn't populated for such impls.
1034 if impl_trait_ref.references_error() { return; }
1036 // Locate trait definition and items
1038 let trait_def = tcx.lookup_trait_def(impl_trait_ref.def_id);
1039 let mut overridden_associated_type = None;
// Closure so the impl-item list can be iterated more than once below.
1041 let impl_items = || impl_item_refs.iter().map(|iiref| ccx.tcx.hir.impl_item(iiref.id));
1043 // Check existing impl methods to see if they are both present in trait
1044 // and compatible with trait signature
1045 for impl_item in impl_items() {
1046 let ty_impl_item = tcx.associated_item(tcx.hir.local_def_id(impl_item.id));
// Find the trait item with the same name, if any.
1047 let ty_trait_item = tcx.associated_items(impl_trait_ref.def_id)
1048 .find(|ac| ac.name == ty_impl_item.name);
1050 // Check that impl definition matches trait definition
1051 if let Some(ty_trait_item) = ty_trait_item {
1052 match impl_item.node {
1053 hir::ImplItemKind::Const(..) => {
1054 // Find associated const definition.
1055 if ty_trait_item.kind == ty::AssociatedKind::Const {
1056 compare_const_impl(ccx,
// Impl declares a const but the trait item is of another kind: E0323.
1062 let mut err = struct_span_err!(tcx.sess, impl_item.span, E0323,
1063 "item `{}` is an associated const, \
1064 which doesn't match its trait `{}`",
1067 err.span_label(impl_item.span, &format!("does not match trait"));
1068 // We can only get the spans from local trait definition
1069 // Same for E0324 and E0325
1070 if let Some(trait_span) = tcx.hir.span_if_local(ty_trait_item.def_id) {
1071 err.span_label(trait_span, &format!("item in trait"));
1076 hir::ImplItemKind::Method(_, body_id) => {
1077 let trait_span = tcx.hir.span_if_local(ty_trait_item.def_id);
1078 if ty_trait_item.kind == ty::AssociatedKind::Method {
// Run the comparison twice: first in the legacy ("old broken")
// mode, and only if that reported nothing, again in the new mode.
1079 let err_count = tcx.sess.err_count();
1080 compare_impl_method(ccx,
1087 true); // start with old-broken-mode
1088 if err_count == tcx.sess.err_count() {
1089 // old broken mode did not report an error. Try with the new mode.
1090 compare_impl_method(ccx,
1097 false); // use the new mode
// Impl declares a method but the trait item is of another kind: E0324.
1100 let mut err = struct_span_err!(tcx.sess, impl_item.span, E0324,
1101 "item `{}` is an associated method, \
1102 which doesn't match its trait `{}`",
1105 err.span_label(impl_item.span, &format!("does not match trait"));
1106 if let Some(trait_span) = tcx.hir.span_if_local(ty_trait_item.def_id) {
1107 err.span_label(trait_span, &format!("item in trait"));
1112 hir::ImplItemKind::Type(_) => {
1113 if ty_trait_item.kind == ty::AssociatedKind::Type {
// Remember that a defaulted associated type was overridden;
// used below to invalidate dependent defaulted items (E0399).
1114 if ty_trait_item.defaultness.has_value() {
1115 overridden_associated_type = Some(impl_item);
// Impl declares a type but the trait item is of another kind: E0325.
1118 let mut err = struct_span_err!(tcx.sess, impl_item.span, E0325,
1119 "item `{}` is an associated type, \
1120 which doesn't match its trait `{}`",
1123 err.span_label(impl_item.span, &format!("does not match trait"));
1124 if let Some(trait_span) = tcx.hir.span_if_local(ty_trait_item.def_id) {
1125 err.span_label(trait_span, &format!("item in trait"));
// Specialization rules apply regardless of the kind checks above.
1133 check_specialization_validity(tcx, trait_def, impl_id, impl_item);
1136 // Check for missing items from trait
1137 let mut missing_items = Vec::new();
1138 let mut invalidated_items = Vec::new();
1139 let associated_type_overridden = overridden_associated_type.is_some();
1140 for trait_item in tcx.associated_items(impl_trait_ref.def_id) {
// An item counts as implemented if some impl in the ancestor chain
// (not the trait's own default) provides it.
1141 let is_implemented = trait_def.ancestors(impl_id)
1142 .defs(tcx, trait_item.name, trait_item.kind)
1144 .map(|node_item| !node_item.node.is_from_trait())
1147 if !is_implemented {
1148 if !trait_item.defaultness.has_value() {
1149 missing_items.push(trait_item);
1150 } else if associated_type_overridden {
1151 invalidated_items.push(trait_item.name);
// Renders a short human-readable signature for a trait item, used in notes.
1156 let signature = |item: &ty::AssociatedItem| {
1158 ty::AssociatedKind::Method => {
1159 format!("{}", tcx.item_type(item.def_id).fn_sig().0)
1161 ty::AssociatedKind::Type => format!("type {};", item.name.to_string()),
1162 ty::AssociatedKind::Const => {
1163 format!("const {}: {:?};", item.name.to_string(), tcx.item_type(item.def_id))
// E0046: required trait items without a provided value are unimplemented.
1168 if !missing_items.is_empty() {
1169 let mut err = struct_span_err!(tcx.sess, impl_span, E0046,
1170 "not all trait items implemented, missing: `{}`",
1171 missing_items.iter()
1172 .map(|trait_item| trait_item.name.to_string())
1173 .collect::<Vec<_>>().join("`, `"));
1174 err.span_label(impl_span, &format!("missing `{}` in implementation",
1175 missing_items.iter()
1176 .map(|trait_item| trait_item.name.to_string())
1177 .collect::<Vec<_>>().join("`, `")));
1178 for trait_item in missing_items {
// Point at the trait's declaration when it is in the local crate,
// otherwise fall back to a note with the rendered signature.
1179 if let Some(span) = tcx.hir.span_if_local(trait_item.def_id) {
1180 err.span_label(span, &format!("`{}` from trait", trait_item.name));
1182 err.note(&format!("`{}` from trait: `{}`",
1184 signature(&trait_item)));
// E0399: defaulted items may rely on the overridden associated type and
// must therefore be re-implemented in this impl.
1190 if !invalidated_items.is_empty() {
1191 let invalidator = overridden_associated_type.unwrap();
1192 span_err!(tcx.sess, invalidator.span, E0399,
1193 "the following trait items need to be reimplemented \
1194 as `{}` was overridden: `{}`",
1196 invalidated_items.iter()
1197 .map(|name| name.to_string())
1198 .collect::<Vec<_>>().join("`, `"))
1202 /// Checks a constant with a given type.
1203 fn check_const_with_type<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
1205 expected_type: Ty<'tcx>,
// Type-check the body of a const/static against `expected_type`, running the
// full pipeline: gather locals, main check (coercion), defaults, closure
// analysis, obligation selection, regionck, and writeback.
1207 let body = ccx.tcx.hir.body(body);
1208 ccx.inherited(id).enter(|inh| {
1209 let fcx = FnCtxt::new(&inh, None, body.value.id);
// Constants must have a statically known size.
1210 fcx.require_type_is_sized(expected_type, body.value.span, traits::ConstSized);
1212 // Gather locals in statics (because of block expressions).
1213 // This is technically unnecessary because locals in static items are forbidden,
1214 // but prevents type checking from blowing up before const checking can properly
1216 GatherLocalsVisitor { fcx: &fcx }.visit_body(body);
// The body expression must coerce to the declared type.
1218 fcx.check_expr_coercable_to_type(&body.value, expected_type);
// Apply numeric/diverging fallbacks, then resolve remaining obligations.
1220 fcx.select_all_obligations_and_apply_defaults();
1221 fcx.closure_analyze(body);
1222 fcx.select_obligations_where_possible();
1224 fcx.select_all_obligations_or_error();
// Region checking and final writeback of inferred types.
1226 fcx.regionck_expr(body);
1227 fcx.resolve_type_vars_in_body(body);
// Type-checks a const/static item: looks up its declared type and checks the
// body against it. NOTE(review): the `body`/`id` parameter lines are elided
// in this listing.
1231 fn check_const<'a, 'tcx>(ccx: &CrateCtxt<'a,'tcx>,
1234 let decl_ty = ccx.tcx.item_type(ccx.tcx.hir.local_def_id(id));
1235 check_const_with_type(ccx, body, decl_ty, id);
1238 /// Checks whether a type can be represented in memory. In particular, it
1239 /// identifies types that contain themselves without indirection through a
1240 /// pointer, which would mean their size is unbounded.
1241 fn check_representable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
1245 let rty = tcx.item_type(item_def_id);
1247 // Check that it is possible to represent this type. This call identifies
1248 // (1) types that contain themselves and (2) types that contain a different
1249 // recursive type. It is only necessary to throw an error on those that
1250 // contain themselves. For case 2, there must be an inner type that will be
1251 // caught by case 1.
1252 match rty.is_representable(tcx, sp) {
1253 Representability::SelfRecursive => {
// Emit the canonical "recursive type has infinite size" diagnostic.
1254 tcx.recursive_type_with_infinite_size_error(item_def_id).emit();
// ContainsRecursive is deliberately accepted here (see comment above).
1257 Representability::Representable | Representability::ContainsRecursive => (),
// Validates a `#[repr(simd)]` type: it must be a non-empty struct (E0075),
// all fields must share one type (E0076), and the element type must be a
// machine type or a type parameter (E0077).
1262 pub fn check_simd<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span, def_id: DefId) {
1263 let t = tcx.item_type(def_id);
1265 ty::TyAdt(def, substs) if def.is_struct() => {
1266 let fields = &def.struct_variant().fields;
1267 if fields.is_empty() {
1268 span_err!(tcx.sess, sp, E0075, "SIMD vector cannot be empty");
// All lanes must have the same type as the first field.
1271 let e = fields[0].ty(tcx, substs);
1272 if !fields.iter().all(|f| f.ty(tcx, substs) == e) {
1273 struct_span_err!(tcx.sess, sp, E0076, "SIMD vector should be homogeneous")
1274 .span_label(sp, &format!("SIMD elements must have the same type"))
1279 ty::TyParam(_) => { /* struct<T>(T, T, T, T) is ok */ }
1280 _ if e.is_machine() => { /* struct(u8, u8, u8, u8) is ok */ }
1282 span_err!(tcx.sess, sp, E0077,
1283 "SIMD vector element type should be machine type");
// Validates an enum definition: rejects `repr` hints on zero-variant enums
// (E0084), feature-gates 128-bit discriminant types, type-checks explicit
// discriminant expressions, reports duplicate discriminant values (E0081),
// and finally checks representability.
1292 #[allow(trivial_numeric_casts)]
1293 pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
1295 vs: &'tcx [hir::Variant],
1297 let def_id = ccx.tcx.hir.local_def_id(id);
1298 let hint = *ccx.tcx.lookup_repr_hints(def_id).get(0).unwrap_or(&attr::ReprAny);
// E0084: a repr hint makes no sense on an enum with no variants.
1300 if hint != attr::ReprAny && vs.is_empty() {
1302 ccx.tcx.sess, sp, E0084,
1303 "unsupported representation for zero-variant enum")
1304 .span_label(sp, &format!("unsupported enum representation"))
// i128/u128 discriminants require the `i128_type` feature gate.
1308 let repr_type_ty = ccx.tcx.enum_repr_type(Some(&hint)).to_ty(ccx.tcx);
1309 if repr_type_ty == ccx.tcx.types.i128 || repr_type_ty == ccx.tcx.types.u128 {
1310 if !ccx.tcx.sess.features.borrow().i128_type {
1311 emit_feature_err(&ccx.tcx.sess.parse_sess,
1312 "i128_type", sp, GateIssue::Language, "128-bit type is unstable");
// Each explicit discriminant expression is checked against the repr type.
1317 if let Some(e) = v.node.disr_expr {
1318 check_const_with_type(ccx, e, repr_type_ty, e.node_id);
1322 let def_id = ccx.tcx.hir.local_def_id(id);
1324 let variants = &ccx.tcx.lookup_adt_def(def_id).variants;
1325 let mut disr_vals: Vec<ty::Disr> = Vec::new();
1326 for (v, variant) in vs.iter().zip(variants.iter()) {
1327 let current_disr_val = variant.disr_val;
1329 // Check for duplicate discriminant values
1330 if let Some(i) = disr_vals.iter().position(|&x| x == current_disr_val) {
// Recover spans for both the earlier and the current variant so the
// diagnostic can point at the first use and the duplicate.
1331 let variant_i_node_id = ccx.tcx.hir.as_local_node_id(variants[i].did).unwrap();
1332 let variant_i = ccx.tcx.hir.expect_variant(variant_i_node_id);
1333 let i_span = match variant_i.node.disr_expr {
1334 Some(expr) => ccx.tcx.hir.span(expr.node_id),
1335 None => ccx.tcx.hir.span(variant_i_node_id)
1337 let span = match v.node.disr_expr {
1338 Some(expr) => ccx.tcx.hir.span(expr.node_id),
1341 struct_span_err!(ccx.tcx.sess, span, E0081,
1342 "discriminant value `{}` already exists", disr_vals[i])
1343 .span_label(i_span, &format!("first use of `{}`", disr_vals[i]))
1344 .span_label(span , &format!("enum already has `{}`", disr_vals[i]))
1347 disr_vals.push(current_disr_val);
// Finally, reject self-recursive (infinitely sized) enums.
1350 check_representable(ccx.tcx, sp, def_id);
// AstConv implementation for FnCtxt: inside a function body, AST-to-type
// conversion can consult the inference context, so unknown types/regions
// become fresh inference variables rather than errors.
1353 impl<'a, 'gcx, 'tcx> AstConv<'gcx, 'tcx> for FnCtxt<'a, 'gcx, 'tcx> {
1354 fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
// Cache for ast_ty -> Ty conversions, keyed by node id.
1356 fn ast_ty_to_ty_cache(&self) -> &RefCell<NodeMap<Ty<'tcx>>> {
1357 &self.ast_ty_to_ty_cache
// The following lookups are infallible in the fn-body context: the data was
// computed during the collect pass, so the span argument is unused.
1360 fn get_generics(&self, _: Span, id: DefId)
1361 -> Result<&'tcx ty::Generics<'tcx>, ErrorReported>
1363 Ok(self.tcx().item_generics(id))
1366 fn get_item_type(&self, _: Span, id: DefId) -> Result<Ty<'tcx>, ErrorReported>
1368 Ok(self.tcx().item_type(id))
1371 fn get_trait_def(&self, _: Span, id: DefId)
1372 -> Result<&'tcx ty::TraitDef, ErrorReported>
1374 Ok(self.tcx().lookup_trait_def(id))
1377 fn ensure_super_predicates(&self, _: Span, _: DefId) -> Result<(), ErrorReported> {
1378 // all super predicates are ensured during collect pass
1382 fn get_free_substs(&self) -> Option<&Substs<'tcx>> {
1383 Some(&self.parameter_environment.free_substs)
// Collects the trait bounds in scope that apply to the given type parameter.
1386 fn get_type_parameter_bounds(&self,
1388 node_id: ast::NodeId)
1389 -> Result<Vec<ty::PolyTraitRef<'tcx>>, ErrorReported>
1391 let def = self.tcx.type_parameter_def(node_id);
1392 let r = self.parameter_environment
1395 .filter_map(|predicate| {
// Keep only trait predicates whose self type is this parameter.
1397 ty::Predicate::Trait(ref data) => {
1398 if data.0.self_ty().is_param(def.index) {
1399 Some(data.to_poly_trait_ref())
// `_` in type position becomes a fresh type inference variable.
1413 fn ty_infer(&self, span: Span) -> Ty<'tcx> {
1414 self.next_ty_var(TypeVariableOrigin::TypeInference(span))
// Like `ty_infer`, but ties the fresh variable to a specific type parameter
// definition (so its default, if any, can be applied later).
1417 fn ty_infer_for_def(&self,
1418 ty_param_def: &ty::TypeParameterDef<'tcx>,
1419 substs: &[Kind<'tcx>],
1420 span: Span) -> Ty<'tcx> {
1421 self.type_var_for_def(span, ty_param_def, substs)
// Projects an associated type out of a poly trait ref by first replacing
// late-bound regions with fresh variables, then normalizing.
1424 fn projected_ty_from_poly_trait_ref(&self,
1426 poly_trait_ref: ty::PolyTraitRef<'tcx>,
1427 item_name: ast::Name)
1430 let (trait_ref, _) =
1431 self.replace_late_bound_regions_with_fresh_var(
1433 infer::LateBoundRegionConversionTime::AssocTypeProjection(item_name),
1436 self.normalize_associated_type(span, trait_ref, item_name)
1439 fn projected_ty(&self,
1441 trait_ref: ty::TraitRef<'tcx>,
1442 item_name: ast::Name)
1445 self.normalize_associated_type(span, trait_ref, item_name)
1448 fn set_tainted_by_errors(&self) {
1449 self.infcx.set_tainted_by_errors()
// RegionScope implementation for FnCtxt: within a function body, omitted or
// anonymous lifetimes become fresh region inference variables rather than
// fixed defaults.
1453 impl<'a, 'gcx, 'tcx> RegionScope for FnCtxt<'a, 'gcx, 'tcx> {
1454 fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
1455 Some(self.base_object_lifetime_default(span))
1458 fn base_object_lifetime_default(&self, span: Span) -> ty::Region {
1459 // RFC #599 specifies that object lifetime defaults take
1460 // precedence over other defaults. But within a fn body we
1461 // don't have a *default* region, rather we use inference to
1462 // find the *correct* region, which is strictly more general
1463 // (and anyway, within a fn body the right region may not even
1464 // be something the user can write explicitly, since it might
1465 // be some expression).
1466 *self.next_region_var(infer::MiscVariable(span))
// Anonymous region: fresh inference variable, tagged with the early-bound
// parameter's name when one is supplied (for better diagnostics).
1469 fn anon_region(&self, span: Span, def: Option<&ty::RegionParameterDef>)
1470 -> Result<ty::Region, Option<Vec<ElisionFailureInfo>>> {
1472 Some(def) => infer::EarlyBoundRegion(span, def.name),
1473 None => infer::MiscVariable(span)
1475 Ok(*self.next_region_var(v))
1479 /// Controls whether the arguments are tupled. This is used for the call
1482 /// Tupling means that all call-side arguments are packed into a tuple and
1483 /// passed as a single parameter. For example, if tupling is enabled, this
1486 /// fn f(x: (isize, isize))
1488 /// Can be called as:
// NOTE(review): the variant list is elided in this listing.
1495 #[derive(Clone, Eq, PartialEq)]
1496 enum TupleArgumentsFlag {
1501 impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
// Constructs a fresh function-checking context on top of an `Inherited`
// inference context. `rty` is the (optional) declared return type and
// `body_id` the node id of the body being checked. NOTE(review): some
// field initializers are elided in this listing.
1502 pub fn new(inh: &'a Inherited<'a, 'gcx, 'tcx>,
1503 rty: Option<Ty<'tcx>>,
1504 body_id: ast::NodeId)
1505 -> FnCtxt<'a, 'gcx, 'tcx> {
1507 ast_ty_to_ty_cache: RefCell::new(NodeMap()),
1509 writeback_errors: Cell::new(false),
// Snapshot the session error count so that errors emitted during this
// fn's checking can be distinguished (see err_count_since_creation).
1510 err_count_on_creation: inh.tcx.sess.err_count(),
1512 ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal,
1513 ast::CRATE_NODE_ID)),
1514 diverges: Cell::new(Diverges::Maybe),
1515 has_errors: Cell::new(false),
1516 enclosing_loops: RefCell::new(EnclosingLoops {
// Accessor for the parameter environment in effect for this body.
1524 pub fn param_env(&self) -> &ty::ParameterEnvironment<'gcx> {
1525 &self.parameter_environment
1528 pub fn sess(&self) -> &Session {
// Errors reported since this FnCtxt was created (see `new` above).
1532 pub fn err_count_since_creation(&self) -> usize {
1533 self.tcx.sess.err_count() - self.err_count_on_creation
1536 /// Produce warning on the given node, if the current point in the
1537 /// function is unreachable, and there hasn't been another warning.
1538 fn warn_if_unreachable(&self, id: ast::NodeId, span: Span, kind: &str) {
// Only warn once per divergent region: flip Always -> WarnedAlways.
1539 if self.diverges.get() == Diverges::Always {
1540 self.diverges.set(Diverges::WarnedAlways);
1542 self.tcx.sess.add_lint(lint::builtin::UNREACHABLE_CODE,
1544 format!("unreachable {}", kind));
// Builds an obligation cause anchored at this body. NOTE(review): the
// `fn cause(...)` signature start is elided in this listing.
1550 code: ObligationCauseCode<'tcx>)
1551 -> ObligationCause<'tcx> {
1552 ObligationCause::new(span, self.body_id, code)
// Shorthand for a cause with no particular code.
1555 pub fn misc(&self, span: Span) -> ObligationCause<'tcx> {
1556 self.cause(span, ObligationCauseCode::MiscObligation)
1559 /// Resolves type variables in `ty` if possible. Unlike the infcx
1560 /// version (resolve_type_vars_if_possible), this version will
1561 /// also select obligations if it seems useful, in an effort
1562 /// to get more type information.
1563 fn resolve_type_vars_with_obligations(&self, mut ty: Ty<'tcx>) -> Ty<'tcx> {
1564 debug!("resolve_type_vars_with_obligations(ty={:?})", ty);
1566 // No TyInfer()? Nothing needs doing.
1567 if !ty.has_infer_types() {
1568 debug!("resolve_type_vars_with_obligations: ty={:?}", ty);
1572 // If `ty` is a type variable, see whether we already know what it is.
1573 ty = self.resolve_type_vars_if_possible(&ty);
1574 if !ty.has_infer_types() {
1575 debug!("resolve_type_vars_with_obligations: ty={:?}", ty);
1579 // If not, try resolving pending obligations as much as
1580 // possible. This can help substantially when there are
1581 // indirect dependencies that don't seem worth tracking
1583 self.select_obligations_where_possible();
1584 ty = self.resolve_type_vars_if_possible(&ty);
1586 debug!("resolve_type_vars_with_obligations: ty={:?}", ty);
// Queues a closure-call resolution to be retried once the closure's kind
// is known; keyed by the closure's DefId.
1590 fn record_deferred_call_resolution(&self,
1591 closure_def_id: DefId,
1592 r: DeferredCallResolutionHandler<'gcx, 'tcx>) {
1593 let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
1594 deferred_call_resolutions.entry(closure_def_id).or_insert(vec![]).push(r);
// Removes and returns all deferred resolutions for a closure (empty vec if
// none were recorded).
1597 fn remove_deferred_call_resolutions(&self,
1598 closure_def_id: DefId)
1599 -> Vec<DeferredCallResolutionHandler<'gcx, 'tcx>>
1601 let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
1602 deferred_call_resolutions.remove(&closure_def_id).unwrap_or(Vec::new())
// Debug tag identifying this FnCtxt instance by address.
1605 pub fn tag(&self) -> String {
1606 let self_ptr: *const FnCtxt = self;
1607 format!("{:?}", self_ptr)
// Looks up the type previously assigned to a local binding; it is a bug for
// a local to have no recorded type by the time this is called.
1610 pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> Ty<'tcx> {
1611 match self.locals.borrow().get(&nid) {
1614 span_bug!(span, "no type for local variable {}",
1615 self.tcx.hir.node_to_string(nid));
// Records the type of a node in the typeck tables and updates the
// error/divergence bookkeeping accordingly.
1621 pub fn write_ty(&self, node_id: ast::NodeId, ty: Ty<'tcx>) {
1622 debug!("write_ty({}, {:?}) in fcx {}",
1623 node_id, ty, self.tag());
1624 self.tables.borrow_mut().node_types.insert(node_id, ty);
1626 if ty.references_error() {
1627 self.has_errors.set(true);
1630 // FIXME(canndrew): This is_never should probably be an is_uninhabited
1631 if ty.is_never() || self.type_var_diverges(ty) {
1632 self.diverges.set(self.diverges.get() | Diverges::Always);
// Records item substitutions for a node; no-op substs are not stored.
1636 pub fn write_substs(&self, node_id: ast::NodeId, substs: ty::ItemSubsts<'tcx>) {
1637 if !substs.substs.is_noop() {
1638 debug!("write_substs({}, {:?}) in fcx {}",
1643 self.tables.borrow_mut().item_substs.insert(node_id, substs);
// Convenience wrapper: records an autoderef adjustment as a DerefRef
// adjustment for the node.
1647 pub fn write_autoderef_adjustment(&self,
1648 node_id: ast::NodeId,
1650 adjusted_ty: Ty<'tcx>) {
1651 self.write_adjustment(node_id, adjustment::Adjustment {
1652 kind: adjustment::Adjust::DerefRef {
// Records an adjustment for a node; identity adjustments are skipped.
1661 pub fn write_adjustment(&self,
1662 node_id: ast::NodeId,
1663 adj: adjustment::Adjustment<'tcx>) {
1664 debug!("write_adjustment(node_id={}, adj={:?})", node_id, adj);
1666 if adj.is_identity() {
1670 self.tables.borrow_mut().adjustments.insert(node_id, adj);
1673 /// Basically whenever we are converting from a type scheme into
1674 /// the fn body space, we always want to normalize associated
1675 /// types as well. This function combines the two.
1676 fn instantiate_type_scheme<T>(&self,
1678 substs: &Substs<'tcx>,
1681 where T : TypeFoldable<'tcx>
// Substitute first, then normalize associated types in the result.
1683 let value = value.subst(self.tcx, substs);
1684 let result = self.normalize_associated_types_in(span, &value);
1685 debug!("instantiate_type_scheme(value={:?}, substs={:?}) = {:?}",
1692 /// As `instantiate_type_scheme`, but for the bounds found in a
1693 /// generic type scheme.
1694 fn instantiate_bounds(&self, span: Span, def_id: DefId, substs: &Substs<'tcx>)
1695 -> ty::InstantiatedPredicates<'tcx> {
1696 let bounds = self.tcx.item_predicates(def_id);
1697 let result = bounds.instantiate(self.tcx, substs);
1698 let result = self.normalize_associated_types_in(span, &result.predicates);
1699 debug!("instantiate_bounds(bounds={:?}, substs={:?}) = {:?}",
1703 ty::InstantiatedPredicates {
1708 /// Replace all anonymized types with fresh inference variables
1709 /// and record them for writeback.
1710 fn instantiate_anon_types<T: TypeFoldable<'tcx>>(&self, value: &T) -> T {
1711 value.fold_with(&mut BottomUpFolder { tcx: self.tcx, fldop: |ty| {
1712 if let ty::TyAnon(def_id, substs) = ty.sty {
1713 // Use the same type variable if the exact same TyAnon appears more
1714 // than once in the return type (e.g. if it's pased to a type alias).
1715 if let Some(ty_var) = self.anon_types.borrow().get(&def_id) {
// First occurrence: mint a fresh variable and remember it.
1718 let span = self.tcx.def_span(def_id);
1719 let ty_var = self.next_ty_var(TypeVariableOrigin::TypeInference(span));
1720 self.anon_types.borrow_mut().insert(def_id, ty_var);
// Instantiate the `impl Trait` bounds and register them as obligations
// on the fresh variable.
1722 let item_predicates = self.tcx.item_predicates(def_id);
1723 let bounds = item_predicates.instantiate(self.tcx, substs);
1725 for predicate in bounds.predicates {
1726 // Change the predicate to refer to the type variable,
1727 // which will be the concrete type, instead of the TyAnon.
1728 // This also instantiates nested `impl Trait`.
1729 let predicate = self.instantiate_anon_types(&predicate);
1731 // Require that the predicate holds for the concrete type.
1732 let cause = traits::ObligationCause::new(span, self.body_id,
1733 traits::ReturnType);
1734 self.register_predicate(traits::Obligation::new(cause, predicate));
// Normalizes associated types in `value`, delegating to the shared
// `Inherited` context with this body as the cause.
1744 fn normalize_associated_types_in<T>(&self, span: Span, value: &T) -> T
1745 where T : TypeFoldable<'tcx>
1747 self.inh.normalize_associated_types_in(span, self.body_id, value)
// Normalizes a single projection `<trait_ref>::item_name` via the
// fulfillment context.
1750 fn normalize_associated_type(&self,
1752 trait_ref: ty::TraitRef<'tcx>,
1753 item_name: ast::Name)
1756 let cause = traits::ObligationCause::new(span,
1758 traits::ObligationCauseCode::MiscObligation);
1761 .normalize_projection_type(self,
1763 trait_ref: trait_ref,
1764 item_name: item_name,
// Convenience writers for the common "known" result types.
1769 pub fn write_nil(&self, node_id: ast::NodeId) {
1770 self.write_ty(node_id, self.tcx.mk_nil());
1773 pub fn write_never(&self, node_id: ast::NodeId) {
1774 self.write_ty(node_id, self.tcx.types.never);
1777 pub fn write_error(&self, node_id: ast::NodeId) {
1778 self.write_ty(node_id, self.tcx.types.err);
// Registers the obligation that `ty` implements the given trait bound.
1781 pub fn require_type_meets(&self,
1784 code: traits::ObligationCauseCode<'tcx>,
1787 self.register_bound(
1790 traits::ObligationCause::new(span, self.body_id, code));
// Registers `ty: Sized` via the Sized lang item.
1793 pub fn require_type_is_sized(&self,
1796 code: traits::ObligationCauseCode<'tcx>)
1798 let lang_item = self.tcx.require_lang_item(lang_items::SizedTraitLangItem);
1799 self.require_type_meets(ty, span, code, lang_item);
1802 pub fn register_bound(&self,
1805 cause: traits::ObligationCause<'tcx>)
1807 self.fulfillment_cx.borrow_mut()
1808 .register_bound(self, ty, def_id, cause);
// Queues a single predicate obligation with the fulfillment context.
1811 pub fn register_predicate(&self,
1812 obligation: traits::PredicateObligation<'tcx>)
1814 debug!("register_predicate({:?})",
1818 .register_predicate_obligation(self, obligation);
1821 pub fn register_predicates(&self,
1822 obligations: Vec<traits::PredicateObligation<'tcx>>)
1824 for obligation in obligations {
1825 self.register_predicate(obligation);
// Registers the obligations from an InferOk and returns its value.
1829 pub fn register_infer_ok_obligations<T>(&self, infer_ok: InferOk<'tcx, T>) -> T {
1830 self.register_predicates(infer_ok.obligations);
// Converts an AST type, also registering a well-formedness obligation on it.
1834 pub fn to_ty(&self, ast_t: &hir::Ty) -> Ty<'tcx> {
1835 let t = AstConv::ast_ty_to_ty(self, self, ast_t);
1836 self.register_wf_obligation(t, ast_t.span, traits::MiscObligation);
// Looks up the recorded type of a node. A missing entry is tolerated (as
// `err`) only if errors have already been reported since this FnCtxt was
// created; otherwise it is a compiler bug.
1840 pub fn node_ty(&self, id: ast::NodeId) -> Ty<'tcx> {
1841 match self.tables.borrow().node_types.get(&id) {
1843 None if self.err_count_since_creation() != 0 => self.tcx.types.err,
1845 bug!("no type for node {}: {} in fcx {}",
1846 id, self.tcx.hir.node_to_string(id),
// Invokes `f` with the recorded item substs for `id`, if any.
1852 pub fn opt_node_ty_substs<F>(&self,
1855 F: FnOnce(&ty::ItemSubsts<'tcx>),
1857 if let Some(s) = self.tables.borrow().item_substs.get(&id) {
1862 /// Registers an obligation for checking later, during regionck, that the type `ty` must
1863 /// outlive the region `r`.
1864 pub fn register_region_obligation(&self,
1866 region: &'tcx ty::Region,
1867 cause: traits::ObligationCause<'tcx>)
1869 let mut fulfillment_cx = self.fulfillment_cx.borrow_mut();
1870 fulfillment_cx.register_region_obligation(ty, region, cause);
1873 /// Registers an obligation for checking later, during regionck, that the type `ty` must
1874 /// outlive the region `r`.
1875 pub fn register_wf_obligation(&self,
1878 code: traits::ObligationCauseCode<'tcx>)
1880 // WF obligations never themselves fail, so no real need to give a detailed cause:
1881 let cause = traits::ObligationCause::new(span, self.body_id, code);
1882 self.register_predicate(traits::Obligation::new(cause, ty::Predicate::WellFormed(ty)));
1885 pub fn register_old_wf_obligation(&self,
1888 code: traits::ObligationCauseCode<'tcx>)
1890 // Registers an "old-style" WF obligation that uses the
1891 // implicator code. This is basically a buggy version of
1892 // `register_wf_obligation` that is being kept around
1893 // temporarily just to help with phasing in the newer rules.
1895 // FIXME(#27579) all uses of this should be migrated to register_wf_obligation eventually
1896 let cause = traits::ObligationCause::new(span, self.body_id, code);
1897 self.register_region_obligation(ty, self.tcx.mk_region(ty::ReEmpty), cause);
1900 /// Registers obligations that all types appearing in `substs` are well-formed.
1901 pub fn add_wf_bounds(&self, substs: &Substs<'tcx>, expr: &hir::Expr)
1903 for ty in substs.types() {
1904 self.register_wf_obligation(ty, expr.span, traits::MiscObligation);
1908 /// Given a fully substituted set of bounds (`generic_bounds`), and the values with which each
1909 /// type/region parameter was instantiated (`substs`), creates and registers suitable
1910 /// trait/region obligations.
1912 /// For example, if there is a function:
1915 /// fn foo<'a,T:'a>(...)
1918 /// and a reference:
1924 /// Then we will create a fresh region variable `'$0` and a fresh type variable `$1` for `'a`
1925 /// and `T`. This routine will add a region obligation `$1:'$0` and register it locally.
1926 pub fn add_obligations_for_parameters(&self,
1927 cause: traits::ObligationCause<'tcx>,
1928 predicates: &ty::InstantiatedPredicates<'tcx>)
// Escaping (late-bound) regions must have been instantiated by the caller.
1930 assert!(!predicates.has_escaping_regions());
1932 debug!("add_obligations_for_parameters(predicates={:?})",
1935 for obligation in traits::predicates_for_generics(cause, predicates) {
1936 self.register_predicate(obligation);
1940 // FIXME(arielb1): use this instead of field.ty everywhere
1941 // Only for fields! Returns <none> for methods>
1942 // Indifferent to privacy flags
// Field type with `substs` applied and associated types normalized.
1943 pub fn field_ty(&self,
1945 field: &'tcx ty::FieldDef,
1946 substs: &Substs<'tcx>)
1949 self.normalize_associated_types_in(span,
1950 &field.ty(self.tcx, substs))
// Runs all deferred cast checks, draining the queue.
1953 fn check_casts(&self) {
1954 let mut deferred_cast_checks = self.deferred_cast_checks.borrow_mut();
1955 for cast in deferred_cast_checks.drain(..) {
1960 /// Apply "fallbacks" to some types
1961 /// ! gets replaced with (), unconstrained ints with i32, and unconstrained floats with f64.
1962 fn default_type_parameters(&self) {
1963 use rustc::ty::error::UnconstrainedNumeric::Neither;
1964 use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1966 // Defaulting inference variables becomes very dubious if we have
1967 // encountered type-checking errors. Therefore, if we think we saw
1968 // some errors in this function, just resolve all uninstantiated type
1969 // variables to TyError.
1970 if self.is_tainted_by_errors() {
1971 for ty in &self.unsolved_variables() {
1972 if let ty::TyInfer(_) = self.shallow_resolve(ty).sty {
1973 debug!("default_type_parameters: defaulting `{:?}` to error", ty);
1974 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx().types.err);
// Apply the standard fallbacks to every remaining unsolved variable.
1980 for ty in &self.unsolved_variables() {
1981 let resolved = self.resolve_type_vars_if_possible(ty);
// Diverging variables fall back to the diverging default (`()`/`!`).
1982 if self.type_var_diverges(resolved) {
1983 debug!("default_type_parameters: defaulting `{:?}` to `!` because it diverges",
1985 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
1986 self.tcx.mk_diverging_default());
1988 match self.type_is_unconstrained_numeric(resolved) {
1989 UnconstrainedInt => {
1990 debug!("default_type_parameters: defaulting `{:?}` to `i32`",
1992 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
1994 UnconstrainedFloat => {
// Fixed: this message previously said `f32`, but the fallback applied
// below (and documented above) is `f64`.
1995 debug!("default_type_parameters: defaulting `{:?}` to `f64`",
1997 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
// Dispatches to the new or old fallback algorithm depending on whether the
// `default_type_parameter_fallback` feature gate is enabled.
2005 fn select_all_obligations_and_apply_defaults(&self) {
2006 if self.tcx.sess.features.borrow().default_type_parameter_fallback {
2007 self.new_select_all_obligations_and_apply_defaults();
2009 self.old_select_all_obligations_and_apply_defaults();
2013 // Implements old type inference fallback algorithm
// Select, apply the fallbacks above, then select again to propagate them.
2014 fn old_select_all_obligations_and_apply_defaults(&self) {
2015 self.select_obligations_where_possible();
2016 self.default_type_parameters();
2017 self.select_obligations_where_possible();
2020 fn new_select_all_obligations_and_apply_defaults(&self) {
2021 use rustc::ty::error::UnconstrainedNumeric::Neither;
2022 use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
2024 // For the time being this errs on the side of being memory wasteful but provides better
2026 // let type_variables = self.type_variables.clone();
2028 // There is a possibility that this algorithm will have to run an arbitrary number of times
2029 // to terminate so we bound it by the compiler's recursion limit.
2030 for _ in 0..self.tcx.sess.recursion_limit.get() {
2031 // First we try to solve all obligations, it is possible that the last iteration
2032 // has made it possible to make more progress.
2033 self.select_obligations_where_possible();
2035 let mut conflicts = Vec::new();
2037 // Collect all unsolved type, integral and floating point variables.
2038 let unsolved_variables = self.unsolved_variables();
2040 // We must collect the defaults *before* we do any unification. Because we have
2041 // directly attached defaults to the type variables any unification that occurs
2042 // will erase defaults causing conflicting defaults to be completely ignored.
2043 let default_map: FxHashMap<_, _> =
2046 .filter_map(|t| self.default(t).map(|d| (t, d)))
2049 let mut unbound_tyvars = FxHashSet();
2051 debug!("select_all_obligations_and_apply_defaults: defaults={:?}", default_map);
2053 // We loop over the unsolved variables, resolving them and if they are
2054 // and unconstrainted numeric type we add them to the set of unbound
2055 // variables. We do this so we only apply literal fallback to type
2056 // variables without defaults.
2057 for ty in &unsolved_variables {
2058 let resolved = self.resolve_type_vars_if_possible(ty);
2059 if self.type_var_diverges(resolved) {
2060 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
2061 self.tcx.mk_diverging_default());
2063 match self.type_is_unconstrained_numeric(resolved) {
2064 UnconstrainedInt | UnconstrainedFloat => {
2065 unbound_tyvars.insert(resolved);
2072 // We now remove any numeric types that also have defaults, and instead insert
2073 // the type variable with a defined fallback.
2074 for ty in &unsolved_variables {
2075 if let Some(_default) = default_map.get(ty) {
2076 let resolved = self.resolve_type_vars_if_possible(ty);
2078 debug!("select_all_obligations_and_apply_defaults: \
2079 ty: {:?} with default: {:?}",
2082 match resolved.sty {
2083 ty::TyInfer(ty::TyVar(_)) => {
2084 unbound_tyvars.insert(ty);
2087 ty::TyInfer(ty::IntVar(_)) | ty::TyInfer(ty::FloatVar(_)) => {
2088 unbound_tyvars.insert(ty);
2089 if unbound_tyvars.contains(resolved) {
2090 unbound_tyvars.remove(resolved);
2099 // If there are no more fallbacks to apply at this point we have applied all possible
2100 // defaults and type inference will proceed as normal.
2101 if unbound_tyvars.is_empty() {
2105 // Finally we go through each of the unbound type variables and unify them with
2106 // the proper fallback, reporting a conflicting default error if any of the
2107 // unifications fail. We know it must be a conflicting default because the
2108 // variable would only be in `unbound_tyvars` and have a concrete value if
2109 // it had been solved by previously applying a default.
2111 // We wrap this in a transaction for error reporting, if we detect a conflict
2112 // we will rollback the inference context to its prior state so we can probe
2113 // for conflicts and correctly report them.
2116 let _ = self.commit_if_ok(|_: &infer::CombinedSnapshot| {
2117 for ty in &unbound_tyvars {
2118 if self.type_var_diverges(ty) {
2119 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
2120 self.tcx.mk_diverging_default());
2122 match self.type_is_unconstrained_numeric(ty) {
2123 UnconstrainedInt => {
2124 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
2126 UnconstrainedFloat => {
2127 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
2130 if let Some(default) = default_map.get(ty) {
2131 let default = default.clone();
2132 match self.eq_types(false,
2133 &self.misc(default.origin_span),
2136 Ok(ok) => self.register_infer_ok_obligations(ok),
2137 Err(_) => conflicts.push((*ty, default)),
2145 // If there are conflicts we rollback, otherwise commit
2146 if conflicts.len() > 0 {
2153 if conflicts.len() > 0 {
2154 // Loop through each conflicting default, figuring out the default that caused
2155 // a unification failure and then report an error for each.
2156 for (conflict, default) in conflicts {
2157 let conflicting_default =
2158 self.find_conflicting_default(&unbound_tyvars, &default_map, conflict)
2159 .unwrap_or(type_variable::Default {
2160 ty: self.next_ty_var(
2161 TypeVariableOrigin::MiscVariable(syntax_pos::DUMMY_SP)),
2162 origin_span: syntax_pos::DUMMY_SP,
2163 // what do I put here?
2164 def_id: self.tcx.hir.local_def_id(ast::CRATE_NODE_ID)
2167 // This is to ensure that we eliminate any non-determinism from the error
2168 // reporting by fixing an order, it doesn't matter what order we choose
2169 // just that it is consistent.
2170 let (first_default, second_default) =
2171 if default.def_id < conflicting_default.def_id {
2172 (default, conflicting_default)
2174 (conflicting_default, default)
2178 self.report_conflicting_default_types(
2179 first_default.origin_span,
2187 self.select_obligations_where_possible();
2190 // For use in error handling related to default type parameter fallback. We explicitly
2191 // apply the default that caused conflict first to a local version of the type variable
2192 // table then apply defaults until we find a conflict. That default must be the one
2193 // that caused conflict earlier.
// Returns the default whose application triggers the unification failure for
// `conflict`, or None if replaying the defaults produces no failure.
// NOTE(review): several interior lines (match arms / closing braces / the snapshot
// wrapper mentioned below) appear elided in this view — confirm against the full file.
2194 fn find_conflicting_default(&self,
2195 unbound_vars: &FxHashSet<Ty<'tcx>>,
2196 default_map: &FxHashMap<&Ty<'tcx>, type_variable::Default<'tcx>>,
2198 -> Option<type_variable::Default<'tcx>> {
2199 use rustc::ty::error::UnconstrainedNumeric::Neither;
2200 use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
2202 // Ensure that we apply the conflicting default first
2203 let mut unbound_tyvars = Vec::with_capacity(unbound_vars.len() + 1);
2204 unbound_tyvars.push(conflict);
2205 unbound_tyvars.extend(unbound_vars.iter());
2207 let mut result = None;
2208 // We run the same code as above applying defaults in order, this time when
2209 // we find the conflict we just return it for error reporting above.
2211 // We also run this inside snapshot that never commits so we can do error
2212 // reporting for more than one conflict.
2213 for ty in &unbound_tyvars {
// Diverging type variables fall back to the diverging default (`!`/`()`).
2214 if self.type_var_diverges(ty) {
2215 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
2216 self.tcx.mk_diverging_default());
// Unconstrained numeric variables get the standard numeric fallbacks.
2218 match self.type_is_unconstrained_numeric(ty) {
2219 UnconstrainedInt => {
2220 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
2222 UnconstrainedFloat => {
2223 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
2226 if let Some(default) = default_map.get(ty) {
2227 let default = default.clone();
2228 match self.eq_types(false,
2229 &self.misc(default.origin_span),
2232 Ok(ok) => self.register_infer_ok_obligations(ok),
// First default whose unification fails is the culprit we report.
2234 result = Some(default);
// Final obligation-resolution step for a body: applies fallback defaults,
// stashes deferred obligations, then selects everything, reporting any
// fulfillment errors that remain.
2246 fn select_all_obligations_or_error(&self) {
2247 debug!("select_all_obligations_or_error");
2249 // upvar inference should have ensured that all deferred call
2250 // resolutions are handled by now.
2251 assert!(self.deferred_call_resolutions.borrow().is_empty());
2253 self.select_all_obligations_and_apply_defaults();
2255 let mut fulfillment_cx = self.fulfillment_cx.borrow_mut();
2257 // Steal the deferred obligations before the fulfillment
2258 // context can turn all of them into errors.
2259 let obligations = fulfillment_cx.take_deferred_obligations();
2260 self.deferred_obligations.borrow_mut().extend(obligations);
2262 match fulfillment_cx.select_all_or_error(self) {
2264 Err(errors) => { self.report_fulfillment_errors(&errors); }
2268 /// Select as many obligations as we can at present.
2269 fn select_obligations_where_possible(&self) {
// Errors are reported immediately; ambiguous obligations are left pending.
2270 match self.fulfillment_cx.borrow_mut().select_where_possible(self) {
2272 Err(errors) => { self.report_fulfillment_errors(&errors); }
2276 /// For the overloaded lvalue expressions (`*x`, `x[3]`), the trait
2277 /// returns a type of `&T`, but the actual type we assign to the
2278 /// *expression* is `T`. So this function just peels off the return
2279 /// type by one layer to yield `T`.
2280 fn make_overloaded_lvalue_return_type(&self,
2281 method: MethodCallee<'tcx>)
2282 -> ty::TypeAndMut<'tcx>
2284 // extract method return type, which will be &T;
2285 // all LB regions should have been instantiated during method lookup
// `unwrap` is justified by the invariant stated above: after method lookup
// there are no late-bound regions left in the signature.
2286 let ret_ty = method.ty.fn_ret();
2287 let ret_ty = self.tcx.no_late_bound_regions(&ret_ty).unwrap();
2289 // method returns &T, but the type as visible to user is T, so deref
2290 ret_ty.builtin_deref(true, NoPreference).unwrap()
// Resolves `base_expr[idx]`: autoderefs `base_ty` step by step, attempting
// `try_index_step` at each level; on `[T; N]` it additionally retries after
// unsizing to `[T]`. Returns `(index type, element type)` on success.
2293 fn lookup_indexing(&self,
2295 base_expr: &'gcx hir::Expr,
2298 lvalue_pref: LvaluePreference)
2299 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2301 // FIXME(#18741) -- this is almost but not quite the same as the
2302 // autoderef that normal method probing does. They could likely be
2305 let mut autoderef = self.autoderef(base_expr.span, base_ty);
2307 while let Some((adj_ty, autoderefs)) = autoderef.next() {
2308 if let Some(final_mt) = self.try_index_step(
2309 MethodCall::expr(expr.id),
2310 expr, base_expr, adj_ty, autoderefs,
2311 false, lvalue_pref, idx_ty)
// Success without unsizing: record the autoderef adjustments and stop.
2313 autoderef.finalize(lvalue_pref, Some(base_expr));
2314 return Some(final_mt);
// Arrays don't support indexing traits directly; coerce `[T; N]` to `[T]`
// (unsize = true below) and try once more at this deref level.
2317 if let ty::TyArray(element_ty, _) = adj_ty.sty {
2318 autoderef.finalize(lvalue_pref, Some(base_expr));
2319 let adjusted_ty = self.tcx.mk_slice(element_ty);
2320 return self.try_index_step(
2321 MethodCall::expr(expr.id), expr, base_expr,
2322 adjusted_ty, autoderefs, true, lvalue_pref, idx_ty);
2325 autoderef.unambiguous_final_ty();
2329 /// To type-check `base_expr[index_expr]`, we progressively autoderef
2330 /// (and otherwise adjust) `base_expr`, looking for a type which either
2331 /// supports builtin indexing or overloaded indexing.
2332 /// This loop implements one step in that search; the autoderef loop
2333 /// is implemented by `lookup_indexing`.
2334 fn try_index_step(&self,
2335 method_call: MethodCall,
2337 base_expr: &'gcx hir::Expr,
2338 adjusted_ty: Ty<'tcx>,
2341 lvalue_pref: LvaluePreference,
2343 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2346 debug!("try_index_step(expr={:?}, base_expr.id={:?}, adjusted_ty={:?}, \
2347 autoderefs={}, unsize={}, index_ty={:?})",
// Fresh variable standing in for the `Index`/`IndexMut` input type parameter.
2355 let input_ty = self.next_ty_var(TypeVariableOrigin::AutoDeref(base_expr.span));
2357 // First, try built-in indexing.
2358 match (adjusted_ty.builtin_index(), &index_ty.sty) {
2359 (Some(ty), &ty::TyUint(ast::UintTy::Us)) | (Some(ty), &ty::TyInfer(ty::IntVar(_))) => {
2360 debug!("try_index_step: success, using built-in indexing");
2361 // If we had `[T; N]`, we should've caught it before unsizing to `[T]`.
2363 self.write_autoderef_adjustment(base_expr.id, autoderefs, adjusted_ty);
2364 return Some((tcx.types.usize, ty));
2369 // Try `IndexMut` first, if preferred.
2370 let method = match (lvalue_pref, tcx.lang_items.index_mut_trait()) {
2371 (PreferMutLvalue, Some(trait_did)) => {
2372 self.lookup_method_in_trait_adjusted(expr.span,
2374 Symbol::intern("index_mut"),
2379 Some(vec![input_ty]))
2384 // Otherwise, fall back to `Index`.
2385 let method = match (method, tcx.lang_items.index_trait()) {
2386 (None, Some(trait_did)) => {
2387 self.lookup_method_in_trait_adjusted(expr.span,
2389 Symbol::intern("index"),
2394 Some(vec![input_ty]))
2396 (method, _) => method,
2399 // If some lookup succeeds, write callee into table and extract index/element
2400 // type from the method signature.
2401 // If some lookup succeeded, install method in table
2402 method.map(|method| {
2403 debug!("try_index_step: success, using overloaded indexing");
2404 self.tables.borrow_mut().method_map.insert(method_call, method);
2405 (input_ty, self.make_overloaded_lvalue_return_type(method).ty)
// Checks the (non-receiver) arguments of a method call against the method's
// signature, then defers to `check_argument_types`. If the method type is
// already an error, every argument is checked against error types so that
// type-checking can continue without cascading diagnostics.
2409 fn check_method_argument_types(&self,
2411 method_fn_ty: Ty<'tcx>,
2412 callee_expr: &'gcx hir::Expr,
2413 args_no_rcvr: &'gcx [hir::Expr],
2414 tuple_arguments: TupleArgumentsFlag,
2415 expected: Expectation<'tcx>)
2417 if method_fn_ty.references_error() {
2418 let err_inputs = self.err_args(args_no_rcvr.len());
2420 let err_inputs = match tuple_arguments {
2421 DontTupleArguments => err_inputs,
2422 TupleArguments => vec![self.tcx.intern_tup(&err_inputs[..])],
2425 self.check_argument_types(sp, &err_inputs[..], &[], args_no_rcvr,
2426 false, tuple_arguments, None);
2429 match method_fn_ty.sty {
2430 ty::TyFnDef(def_id, .., ref fty) => {
2431 // HACK(eddyb) ignore self in the definition (see above).
// `inputs()[1..]` skips the receiver so arity/expectations line up
// with `args_no_rcvr`.
2432 let expected_arg_tys = self.expected_types_for_fn_args(
2436 &fty.sig.0.inputs()[1..]
2438 self.check_argument_types(sp, &fty.sig.0.inputs()[1..], &expected_arg_tys[..],
2439 args_no_rcvr, fty.sig.0.variadic, tuple_arguments,
2440 self.tcx.hir.span_if_local(def_id));
2444 span_bug!(callee_expr.span, "method without bare fn type");
2450 /// Generic function that factors out common logic from function calls,
2451 /// method calls and overloaded operators.
// Verifies argument count (emitting E0057/E0060/E0061 on mismatch), then
// type-checks each argument against the formal and expected types. For
// C-variadic calls, extra arguments are checked and the C default-promotion
// rules are enforced by rejecting types that would be autopromoted.
2452 fn check_argument_types(&self,
2454 fn_inputs: &[Ty<'tcx>],
2455 expected_arg_tys: &[Ty<'tcx>],
2456 args: &'gcx [hir::Expr],
2458 tuple_arguments: TupleArgumentsFlag,
2459 def_span: Option<Span>) {
2462 // Grab the argument types, supplying fresh type variables
2463 // if the wrong number of arguments were supplied
2464 let supplied_arg_count = if tuple_arguments == DontTupleArguments {
2470 // All the input types from the fn signature must outlive the call
2471 // so as to validate implied bounds.
2472 for &fn_input_ty in fn_inputs {
2473 self.register_wf_obligation(fn_input_ty, sp, traits::MiscObligation);
2476 let mut expected_arg_tys = expected_arg_tys;
2477 let expected_arg_count = fn_inputs.len();
// Compute a span covering all supplied arguments, for arity diagnostics.
2479 let sp_args = if args.len() > 0 {
2480 let (first, args) = args.split_at(1);
2481 let mut sp_tmp = first[0].span;
2483 let sp_opt = self.sess().codemap().merge_spans(sp_tmp, arg.span);
2484 if ! sp_opt.is_some() {
2487 sp_tmp = sp_opt.unwrap();
// Local helper shared by the three arity-error sites below.
2494 fn parameter_count_error<'tcx>(sess: &Session, sp: Span, expected_count: usize,
2495 arg_count: usize, error_code: &str, variadic: bool,
2496 def_span: Option<Span>) {
2497 let mut err = sess.struct_span_err_with_code(sp,
2498 &format!("this function takes {}{} parameter{} but {} parameter{} supplied",
2499 if variadic {"at least "} else {""},
2501 if expected_count == 1 {""} else {"s"},
2503 if arg_count == 1 {" was"} else {"s were"}),
2506 err.span_label(sp, &format!("expected {}{} parameter{}",
2507 if variadic {"at least "} else {""},
2509 if expected_count == 1 {""} else {"s"}));
2510 if let Some(def_s) = def_span {
2511 err.span_label(def_s, &format!("defined here"));
// Tupled arguments (`Fn`-trait sugar): the single formal input must be a
// tuple whose arity matches the supplied arguments.
2516 let formal_tys = if tuple_arguments == TupleArguments {
2517 let tuple_type = self.structurally_resolved_type(sp, fn_inputs[0]);
2518 match tuple_type.sty {
2519 ty::TyTuple(arg_types) if arg_types.len() != args.len() => {
2520 parameter_count_error(tcx.sess, sp_args, arg_types.len(), args.len(),
2521 "E0057", false, def_span);
2522 expected_arg_tys = &[];
2523 self.err_args(args.len())
2525 ty::TyTuple(arg_types) => {
2526 expected_arg_tys = match expected_arg_tys.get(0) {
2527 Some(&ty) => match ty.sty {
2528 ty::TyTuple(ref tys) => &tys,
2536 span_err!(tcx.sess, sp, E0059,
2537 "cannot use call notation; the first type parameter \
2538 for the function trait is neither a tuple nor unit");
2539 expected_arg_tys = &[];
2540 self.err_args(args.len())
2543 } else if expected_arg_count == supplied_arg_count {
2545 } else if variadic {
2546 if supplied_arg_count >= expected_arg_count {
2549 parameter_count_error(tcx.sess, sp_args, expected_arg_count,
2550 supplied_arg_count, "E0060", true, def_span);
2551 expected_arg_tys = &[];
2552 self.err_args(supplied_arg_count)
2555 parameter_count_error(tcx.sess, sp_args, expected_arg_count,
2556 supplied_arg_count, "E0061", false, def_span);
2557 expected_arg_tys = &[];
2558 self.err_args(supplied_arg_count)
2561 debug!("check_argument_types: formal_tys={:?}",
2562 formal_tys.iter().map(|t| self.ty_to_string(*t)).collect::<Vec<String>>());
2564 // Check the arguments.
2565 // We do this in a pretty awful way: first we typecheck any arguments
2566 // that are not closures, then we typecheck the closures. This is so
2567 // that we have more information about the types of arguments when we
2568 // typecheck the functions. This isn't really the right way to do this.
2569 for &check_closures in &[false, true] {
2570 debug!("check_closures={}", check_closures);
2572 // More awful hacks: before we check argument types, try to do
2573 // an "opportunistic" vtable resolution of any trait bounds on
2574 // the call. This helps coercions.
2576 self.select_obligations_where_possible();
2579 // For variadic functions, we don't have a declared type for all of
2580 // the arguments hence we only do our usual type checking with
2581 // the arguments whose types we do know.
2582 let t = if variadic {
2584 } else if tuple_arguments == TupleArguments {
2589 for (i, arg) in args.iter().take(t).enumerate() {
2590 // Warn only for the first loop (the "no closures" one).
2591 // Closure arguments themselves can't be diverging, but
2592 // a previous argument can, e.g. `foo(panic!(), || {})`.
2593 if !check_closures {
2594 self.warn_if_unreachable(arg.id, arg.span, "expression");
2597 let is_closure = match arg.node {
2598 hir::ExprClosure(..) => true,
2602 if is_closure != check_closures {
2606 debug!("checking the argument");
2607 let formal_ty = formal_tys[i];
2609 // The special-cased logic below has three functions:
2610 // 1. Provide as good of an expected type as possible.
2611 let expected = expected_arg_tys.get(i).map(|&ty| {
2612 Expectation::rvalue_hint(self, ty)
2615 let checked_ty = self.check_expr_with_expectation(&arg,
2616 expected.unwrap_or(ExpectHasType(formal_ty)));
2617 // 2. Coerce to the most detailed type that could be coerced
2618 // to, which is `expected_ty` if `rvalue_hint` returns an
2619 // `ExpectHasType(expected_ty)`, or the `formal_ty` otherwise.
2620 let coerce_ty = expected.and_then(|e| e.only_has_type(self));
2621 self.demand_coerce(&arg, checked_ty, coerce_ty.unwrap_or(formal_ty));
2623 // 3. Relate the expected type and the formal one,
2624 // if the expected type was used for the coercion.
2625 coerce_ty.map(|ty| self.demand_suptype(arg.span, formal_ty, ty));
2629 // We also need to make sure we at least write the ty of the other
2630 // arguments which we skipped above.
2632 for arg in args.iter().skip(expected_arg_count) {
2633 let arg_ty = self.check_expr(&arg);
2635 // There are a few types which get autopromoted when passed via varargs
2636 // in C but we just error out instead and require explicit casts.
2637 let arg_ty = self.structurally_resolved_type(arg.span,
2640 ty::TyFloat(ast::FloatTy::F32) => {
2641 self.type_error_message(arg.span, |t| {
2642 format!("can't pass an `{}` to variadic \
2643 function, cast to `c_double`", t)
2646 ty::TyInt(ast::IntTy::I8) | ty::TyInt(ast::IntTy::I16) | ty::TyBool => {
2647 self.type_error_message(arg.span, |t| {
2648 format!("can't pass `{}` to variadic \
2649 function, cast to `c_int`",
2653 ty::TyUint(ast::UintTy::U8) | ty::TyUint(ast::UintTy::U16) => {
2654 self.type_error_message(arg.span, |t| {
2655 format!("can't pass `{}` to variadic \
2656 function, cast to `c_uint`",
2660 ty::TyFnDef(.., f) => {
// Bare fn items can't cross a C varargs boundary; suggest the fn-pointer type.
2661 let ptr_ty = self.tcx.mk_fn_ptr(f);
2662 let ptr_ty = self.resolve_type_vars_if_possible(&ptr_ty);
2663 self.type_error_message(arg.span,
2665 format!("can't pass `{}` to variadic \
2666 function, cast to `{}`", t, ptr_ty)
// Produces `len` copies of the error type, used to keep checking arguments
// after an arity or callee-type error.
2675 fn err_args(&self, len: usize) -> Vec<Ty<'tcx>> {
2676 (0..len).map(|_| self.tcx.types.err).collect()
2679 // AST fragment checking
// Determines the type of a literal expression, using the expectation to pick
// a concrete type for unsuffixed integer/float literals where the expected
// type allows it, and falling back to fresh int/float inference variables
// otherwise. (The `fn` header line appears elided in this view.)
2682 expected: Expectation<'tcx>)
2688 ast::LitKind::Str(..) => tcx.mk_static_str(),
2689 ast::LitKind::ByteStr(ref v) => {
2690 tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic),
2691 tcx.mk_array(tcx.types.u8, v.len()))
2693 ast::LitKind::Byte(_) => tcx.types.u8,
2694 ast::LitKind::Char(_) => tcx.types.char,
2695 ast::LitKind::Int(_, ast::LitIntType::Signed(t)) => tcx.mk_mach_int(t),
2696 ast::LitKind::Int(_, ast::LitIntType::Unsigned(t)) => tcx.mk_mach_uint(t),
2697 ast::LitKind::Int(_, ast::LitIntType::Unsuffixed) => {
2698 let opt_ty = expected.to_option(self).and_then(|ty| {
2699 ty::TyInt(_) | ty::TyUint(_) => Some(ty),
2701 ty::TyChar => Some(tcx.types.u8),
2702 ty::TyRawPtr(..) => Some(tcx.types.usize),
2703 ty::TyFnDef(..) | ty::TyFnPtr(_) => Some(tcx.types.usize),
// No usable expectation: introduce an integer inference variable so the
// standard i32 fallback can apply later.
2707 opt_ty.unwrap_or_else(
2708 || tcx.mk_int_var(self.next_int_var_id()))
2710 ast::LitKind::Float(_, t) => tcx.mk_mach_float(t),
2711 ast::LitKind::FloatUnsuffixed(_) => {
2712 let opt_ty = expected.to_option(self).and_then(|ty| {
2714 ty::TyFloat(_) => Some(ty),
2718 opt_ty.unwrap_or_else(
2719 || tcx.mk_float_var(self.next_float_var_id()))
2721 ast::LitKind::Bool(_) => tcx.types.bool
// Checks `expr` with `expected` as a hint, then demands exact type equality.
2725 fn check_expr_eq_type(&self,
2726 expr: &'gcx hir::Expr,
2727 expected: Ty<'tcx>) {
2728 let ty = self.check_expr_with_hint(expr, expected);
2729 self.demand_eqtype(expr.span, expected, ty);
// Checks `expr` with `expected` as a hint, then demands that the resulting
// type is a subtype of `expected` (weaker than `check_expr_eq_type`).
2732 pub fn check_expr_has_type(&self,
2733 expr: &'gcx hir::Expr,
2734 expected: Ty<'tcx>) -> Ty<'tcx> {
2735 let ty = self.check_expr_with_hint(expr, expected);
2736 self.demand_suptype(expr.span, expected, ty);
// Checks `expr` with `expected` as a hint, then demands that the resulting
// type coerces to `expected` (allows autoderef/unsize, unlike suptype).
2740 fn check_expr_coercable_to_type(&self,
2741 expr: &'gcx hir::Expr,
2742 expected: Ty<'tcx>) -> Ty<'tcx> {
2743 let ty = self.check_expr_with_hint(expr, expected);
2744 self.demand_coerce(expr, ty, expected);
// Convenience wrapper: check with an `ExpectHasType` expectation.
2748 fn check_expr_with_hint(&self, expr: &'gcx hir::Expr,
2749 expected: Ty<'tcx>) -> Ty<'tcx> {
2750 self.check_expr_with_expectation(expr, ExpectHasType(expected))
// Convenience wrapper: check with the given expectation and no lvalue preference.
2753 fn check_expr_with_expectation(&self,
2754 expr: &'gcx hir::Expr,
2755 expected: Expectation<'tcx>) -> Ty<'tcx> {
2756 self.check_expr_with_expectation_and_lvalue_pref(expr, expected, NoPreference)
// Convenience wrapper: check with no expectation at all.
2759 fn check_expr(&self, expr: &'gcx hir::Expr) -> Ty<'tcx> {
2760 self.check_expr_with_expectation(expr, NoExpectation)
// Convenience wrapper: no expectation, but propagate the caller's lvalue
// preference (e.g. `PreferMutLvalue` for assignment targets).
2763 fn check_expr_with_lvalue_pref(&self, expr: &'gcx hir::Expr,
2764 lvalue_pref: LvaluePreference) -> Ty<'tcx> {
2765 self.check_expr_with_expectation_and_lvalue_pref(expr, NoExpectation, lvalue_pref)
2768 // determine the `self` type, using fresh variables for all variables
2769 // declared on the impl declaration e.g., `impl<A,B> for Vec<(A,B)>`
2770 // would return ($0, $1) where $0 and $1 are freshly instantiated type
// Returns both the instantiated self type and the fresh substitutions used.
2772 pub fn impl_self_ty(&self,
2773 span: Span, // (potential) receiver for this impl
2775 -> TypeAndSubsts<'tcx> {
2776 let ity = self.tcx.item_type(did);
2777 debug!("impl_self_ty: ity={:?}", ity);
2779 let substs = self.fresh_substs_for_item(span, did);
2780 let substd_ty = self.instantiate_type_scheme(span, &substs, &ity);
2782 TypeAndSubsts { substs: substs, ty: substd_ty }
2785 /// Unifies the return type with the expected type early, for more coercions
2786 /// and forward type information on the argument expressions.
// Returns per-argument expected types derived from subtyping the formal
// return type against the expected return type; empty vec if that fails.
2787 fn expected_types_for_fn_args(&self,
2789 expected_ret: Expectation<'tcx>,
2790 formal_ret: Ty<'tcx>,
2791 formal_args: &[Ty<'tcx>])
2793 let expected_args = expected_ret.only_has_type(self).and_then(|ret_ty| {
// Region variables created here are "fudged" away so a failed attempt
// leaves no stray constraints behind.
2794 self.fudge_regions_if_ok(&RegionVariableOrigin::Coercion(call_span), || {
2795 // Attempt to apply a subtyping relationship between the formal
2796 // return type (likely containing type variables if the function
2797 // is polymorphic) and the expected return type.
2798 // No argument expectations are produced if unification fails.
2799 let origin = self.misc(call_span);
2800 let ures = self.sub_types(false, &origin, formal_ret, ret_ty);
2801 // FIXME(#15760) can't use try! here, FromError doesn't default
2802 // to identity so the resulting type is not constrained.
2804 Ok(ok) => self.register_infer_ok_obligations(ok),
2805 Err(e) => return Err(e),
2808 // Record all the argument types, with the substitutions
2809 // produced from the above subtyping unification.
2810 Ok(formal_args.iter().map(|ty| {
2811 self.resolve_type_vars_if_possible(ty)
2814 }).unwrap_or(vec![]);
2815 debug!("expected_types_for_fn_args(formal={:?} -> {:?}, expected={:?} -> {:?})",
2816 formal_args, formal_ret,
2817 expected_args, expected_ret);
2821 // Checks a method call.
// Type-checks the receiver, resolves the method via `lookup_method`, records
// the callee in the method map, and checks the remaining arguments. On lookup
// failure a method error is reported (unless the name is the error sentinel)
// and the expression is marked as an error.
2822 fn check_method_call(&self,
2823 expr: &'gcx hir::Expr,
2824 method_name: Spanned<ast::Name>,
2825 args: &'gcx [hir::Expr],
2827 expected: Expectation<'tcx>,
2828 lvalue_pref: LvaluePreference) -> Ty<'tcx> {
// `args[0]` is the receiver; the remaining elements are the real arguments.
2829 let rcvr = &args[0];
2830 let rcvr_t = self.check_expr_with_lvalue_pref(&rcvr, lvalue_pref);
2832 // no need to check for bot/err -- callee does that
2833 let expr_t = self.structurally_resolved_type(expr.span, rcvr_t);
2835 let tps = tps.iter().map(|ast_ty| self.to_ty(&ast_ty)).collect::<Vec<_>>();
2836 let fn_ty = match self.lookup_method(method_name.span,
2843 let method_ty = method.ty;
2844 let method_call = MethodCall::expr(expr.id);
2845 self.tables.borrow_mut().method_map.insert(method_call, method);
2849 if method_name.node != keywords::Invalid.name() {
2850 self.report_method_error(method_name.span,
2857 self.write_error(expr.id);
2862 // Call the generic checker.
2863 let ret_ty = self.check_method_argument_types(method_name.span, fn_ty,
2871 // A generic function for checking the then and else in an if
// Checks the condition as `bool`, checks both branches with divergence
// tracked separately, and unifies the branch types: via coercion LUB when an
// else branch exists, or against `()` when it does not.
2873 fn check_then_else(&self,
2874 cond_expr: &'gcx hir::Expr,
2875 then_blk: &'gcx hir::Block,
2876 opt_else_expr: Option<&'gcx hir::Expr>,
2878 expected: Expectation<'tcx>) -> Ty<'tcx> {
2879 let cond_ty = self.check_expr_has_type(cond_expr, self.tcx.types.bool);
// Save and reset divergence so each branch is measured independently.
2880 let cond_diverges = self.diverges.get();
2881 self.diverges.set(Diverges::Maybe);
2883 let expected = expected.adjust_for_branches(self);
2884 let then_ty = self.check_block_with_expected(then_blk, expected);
2885 let then_diverges = self.diverges.get();
2886 self.diverges.set(Diverges::Maybe);
2888 let unit = self.tcx.mk_nil();
2889 let (cause, expected_ty, found_ty, result);
2890 if let Some(else_expr) = opt_else_expr {
2891 let else_ty = self.check_expr_with_expectation(else_expr, expected);
2892 let else_diverges = self.diverges.get();
2893 cause = self.cause(sp, ObligationCauseCode::IfExpression);
2895 // Only try to coerce-unify if we have a then expression
2896 // to assign coercions to, otherwise it's () or diverging.
2897 expected_ty = then_ty;
2899 result = if let Some(ref then) = then_blk.expr {
2900 let res = self.try_find_coercion_lub(&cause, || Some(&**then),
2901 then_ty, else_expr, else_ty);
2903 // In case we did perform an adjustment, we have to update
2904 // the type of the block, because old trans still uses it.
2906 let adj = self.tables.borrow().adjustments.get(&then.id).cloned();
2907 if let Some(adj) = adj {
2908 self.write_ty(then_blk.id, adj.target);
// No tail expression in the then-block: fall back to a plain LUB inside
// a transaction so failure leaves no constraints behind.
2914 self.commit_if_ok(|_| {
2915 let trace = TypeTrace::types(&cause, true, then_ty, else_ty);
2916 self.lub(true, trace, &then_ty, &else_ty)
2917 .map(|ok| self.register_infer_ok_obligations(ok))
2921 // We won't diverge unless both branches do (or the condition does).
2922 self.diverges.set(cond_diverges | then_diverges & else_diverges);
2924 // If the condition is false we can't diverge.
2925 self.diverges.set(cond_diverges);
2927 cause = self.cause(sp, ObligationCauseCode::IfExpressionWithNoElse);
// An if without else must have a `()`-typed then-branch.
2930 result = self.eq_types(true, &cause, unit, then_ty)
2932 self.register_infer_ok_obligations(ok);
2939 if cond_ty.references_error() {
2946 self.report_mismatched_types(&cause, expected_ty, found_ty, e).emit();
2952 // Check field access expressions
// Resolves `base.field`: autoderefs the base looking for a struct/union with
// a field of that name. Tracks an inaccessible match as `private_candidate`
// for a better diagnostic, and suggests near-miss names or a same-named
// method when no field is found.
2953 fn check_field(&self,
2954 expr: &'gcx hir::Expr,
2955 lvalue_pref: LvaluePreference,
2956 base: &'gcx hir::Expr,
2957 field: &Spanned<ast::Name>) -> Ty<'tcx> {
2958 let expr_t = self.check_expr_with_lvalue_pref(base, lvalue_pref);
2959 let expr_t = self.structurally_resolved_type(expr.span,
2961 let mut private_candidate = None;
2962 let mut autoderef = self.autoderef(expr.span, expr_t);
2963 while let Some((base_t, autoderefs)) = autoderef.next() {
2965 ty::TyAdt(base_def, substs) if !base_def.is_enum() => {
2966 debug!("struct named {:?}", base_t);
2967 if let Some(field) = base_def.struct_variant().find_field_named(field.node) {
2968 let field_ty = self.field_ty(expr.span, field, substs);
2969 if self.tcx.vis_is_accessible_from(field.vis, self.body_id) {
2970 autoderef.finalize(lvalue_pref, Some(base));
2971 self.write_autoderef_adjustment(base.id, autoderefs, base_t);
2973 self.tcx.check_stability(field.did, expr.id, expr.span);
// Field exists but is private here; remember it for the error below.
2977 private_candidate = Some((base_def.did, field_ty));
2983 autoderef.unambiguous_final_ty();
2985 if let Some((did, field_ty)) = private_candidate {
2986 let struct_path = self.tcx().item_path_str(did);
2987 let msg = format!("field `{}` of struct `{}` is private", field.node, struct_path);
2988 let mut err = self.tcx().sess.struct_span_err(expr.span, &msg);
2989 // Also check if an accessible method exists, which is often what is meant.
2990 if self.method_exists(field.span, field.node, expr_t, expr.id, false) {
2991 err.note(&format!("a method `{}` also exists, perhaps you wish to call it",
// The error-sentinel name means an error was already reported upstream.
2996 } else if field.node == keywords::Invalid.name() {
2997 self.tcx().types.err
2998 } else if self.method_exists(field.span, field.node, expr_t, expr.id, true) {
2999 self.type_error_struct(field.span, |actual| {
3000 format!("attempted to take value of method `{}` on type \
3001 `{}`", field.node, actual)
3003 .help("maybe a `()` to call it is missing? \
3004 If not, try an anonymous function")
3006 self.tcx().types.err
3008 let mut err = self.type_error_struct(field.span, |actual| {
3009 format!("no field `{}` on type `{}`",
3013 ty::TyAdt(def, _) if !def.is_enum() => {
3014 if let Some(suggested_field_name) =
3015 Self::suggest_field_name(def.struct_variant(), field, vec![]) {
3016 err.span_label(field.span,
3017 &format!("did you mean `{}`?", suggested_field_name));
3019 err.span_label(field.span,
3020 &format!("unknown field"));
3023 ty::TyRawPtr(..) => {
3024 err.note(&format!("`{0}` is a native pointer; perhaps you need to deref with \
3026 self.tcx.hir.node_to_pretty_string(base.id),
3032 self.tcx().types.err
3036 // Return a hint about the closest match in field names
// `skip` lists field names that must not be suggested (e.g. already-supplied
// fields in a struct literal); private fields of non-local types are also
// excluded from the candidates.
3037 fn suggest_field_name(variant: &'tcx ty::VariantDef,
3038 field: &Spanned<ast::Name>,
3039 skip : Vec<InternedString>)
3041 let name = field.node.as_str();
3042 let names = variant.fields.iter().filter_map(|field| {
3043 // ignore already set fields and private fields from non-local crates
3044 if skip.iter().any(|x| *x == field.name.as_str()) ||
3045 (variant.did.krate != LOCAL_CRATE && field.vis != Visibility::Public) {
3052 // only find fits with at least one matching letter
3053 find_best_match_for_name(names, &name, Some(name.len()))
3056 // Check tuple index expressions
// Resolves `base.N`: autoderefs the base looking for either a tuple struct
// (field `N` of the ctor) or a plain tuple. Tracks privacy violations and
// distinguishes out-of-bounds indices from non-tuple types in errors.
3057 fn check_tup_field(&self,
3058 expr: &'gcx hir::Expr,
3059 lvalue_pref: LvaluePreference,
3060 base: &'gcx hir::Expr,
3061 idx: codemap::Spanned<usize>) -> Ty<'tcx> {
3062 let expr_t = self.check_expr_with_lvalue_pref(base, lvalue_pref);
3063 let expr_t = self.structurally_resolved_type(expr.span,
3065 let mut private_candidate = None;
3066 let mut tuple_like = false;
3067 let mut autoderef = self.autoderef(expr.span, expr_t);
3068 while let Some((base_t, autoderefs)) = autoderef.next() {
3069 let field = match base_t.sty {
3070 ty::TyAdt(base_def, substs) if base_def.is_struct() => {
// Only tuple structs (fn-like ctor) support numeric field access.
3071 tuple_like = base_def.struct_variant().ctor_kind == CtorKind::Fn;
3072 if !tuple_like { continue }
3074 debug!("tuple struct named {:?}", base_t);
3075 base_def.struct_variant().fields.get(idx.node).and_then(|field| {
3076 let field_ty = self.field_ty(expr.span, field, substs);
3077 private_candidate = Some((base_def.did, field_ty));
3078 if self.tcx.vis_is_accessible_from(field.vis, self.body_id) {
3079 self.tcx.check_stability(field.did, expr.id, expr.span);
3086 ty::TyTuple(ref v) => {
3088 v.get(idx.node).cloned()
3093 if let Some(field_ty) = field {
3094 autoderef.finalize(lvalue_pref, Some(base));
3095 self.write_autoderef_adjustment(base.id, autoderefs, base_t);
3099 autoderef.unambiguous_final_ty();
3101 if let Some((did, field_ty)) = private_candidate {
3102 let struct_path = self.tcx().item_path_str(did);
3103 let msg = format!("field `{}` of struct `{}` is private", idx.node, struct_path);
3104 self.tcx().sess.span_err(expr.span, &msg);
3108 self.type_error_message(
3112 format!("attempted out-of-bounds tuple index `{}` on \
3117 format!("attempted tuple index `{}` on type `{}`, but the \
3118 type was not a tuple or tuple struct",
3125 self.tcx().types.err
// Reports E0559 (enum variant) or E0560 (struct/union) for a struct literal
// naming a field that does not exist on the target, suggesting the closest
// existing field name while skipping fields already provided.
3128 fn report_unknown_field(&self,
3130 variant: &'tcx ty::VariantDef,
3132 skip_fields: &[hir::Field],
3134 let mut err = self.type_error_struct_with_diag(
3136 |actual| match ty.sty {
3137 ty::TyAdt(adt, ..) if adt.is_enum() => {
3138 struct_span_err!(self.tcx.sess, field.name.span, E0559,
3139 "{} `{}::{}` has no field named `{}`",
3140 kind_name, actual, variant.name, field.name.node)
3143 struct_span_err!(self.tcx.sess, field.name.span, E0560,
3144 "{} `{}` has no field named `{}`",
3145 kind_name, actual, field.name.node)
3149 // prevent all specified fields from being suggested
3150 let skip_fields = skip_fields.iter().map(|ref x| x.name.node.as_str());
3151 if let Some(field_name) = Self::suggest_field_name(variant,
3153 skip_fields.collect()) {
3154 err.span_label(field.name.span,
3155 &format!("field does not exist - did you mean `{}`?", field_name));
3158 ty::TyAdt(adt, ..) if adt.is_enum() => {
3159 err.span_label(field.name.span, &format!("`{}::{}` does not have this field",
3163 err.span_label(field.name.span, &format!("`{}` does not have this field", ty));
// Type-checks the fields of a struct/variant/union literal: each supplied
// field is checked against its declared type, duplicates produce E0062-style
// errors, unknown fields are reported, and (when `check_completeness` and no
// functional-update base is involved) missing fields produce E0063. Unions
// must supply exactly one field.
3170 fn check_expr_struct_fields(&self,
3172 expr_id: ast::NodeId,
3174 variant: &'tcx ty::VariantDef,
3175 ast_fields: &'gcx [hir::Field],
3176 check_completeness: bool) {
3178 let (substs, adt_kind, kind_name) = match adt_ty.sty {
3179 ty::TyAdt(adt, substs) => (substs, adt.adt_kind(), adt.variant_descr()),
3180 _ => span_bug!(span, "non-ADT passed to check_expr_struct_fields")
// `remaining_fields` starts with all declared fields; each supplied field
// removes its entry, so what's left at the end is the missing set.
3183 let mut remaining_fields = FxHashMap();
3184 for field in &variant.fields {
3185 remaining_fields.insert(field.name, field);
3188 let mut seen_fields = FxHashMap();
3190 let mut error_happened = false;
3192 // Typecheck each field.
3193 for field in ast_fields {
3194 let expected_field_type;
3196 if let Some(v_field) = remaining_fields.remove(&field.name.node) {
3197 expected_field_type = self.field_ty(field.span, v_field, substs);
3199 seen_fields.insert(field.name.node, field.span);
3201 // we don't look at stability attributes on
3202 // struct-like enums (yet...), but it's definitely not
3203 // a bug to have construct one.
3204 if adt_kind != ty::AdtKind::Enum {
3205 tcx.check_stability(v_field.did, expr_id, field.span);
3208 error_happened = true;
3209 expected_field_type = tcx.types.err;
// Name exists on the variant but was already consumed: duplicate field.
3210 if let Some(_) = variant.find_field_named(field.name.node) {
3211 let mut err = struct_span_err!(self.tcx.sess,
3214 "field `{}` specified more than once",
3217 err.span_label(field.name.span, &format!("used more than once"));
3219 if let Some(prev_span) = seen_fields.get(&field.name.node) {
3220 err.span_label(*prev_span, &format!("first use of `{}`", field.name.node));
3225 self.report_unknown_field(adt_ty, variant, field, ast_fields, kind_name);
3229 // Make sure to give a type to the field even if there's
3230 // an error, so we can continue typechecking
3231 self.check_expr_coercable_to_type(&field.expr, expected_field_type);
3234 // Make sure the programmer specified correct number of fields.
3235 if kind_name == "union" {
3236 if ast_fields.len() != 1 {
3237 tcx.sess.span_err(span, "union expressions should have exactly one field");
3239 } else if check_completeness && !error_happened && !remaining_fields.is_empty() {
3240 let len = remaining_fields.len();
// At most three missing fields are listed by name; the rest are counted.
3242 let mut displayable_field_names = remaining_fields
3244 .map(|x| x.as_str())
3245 .collect::<Vec<_>>();
3247 displayable_field_names.sort();
3249 let truncated_fields_error = if len <= 3 {
3252 format!(" and {} other field{}", (len - 3), if len - 3 == 1 {""} else {"s"})
3255 let remaining_fields_names = displayable_field_names.iter().take(3)
3256 .map(|n| format!("`{}`", n))
3257 .collect::<Vec<_>>()
3260 struct_span_err!(tcx.sess, span, E0063,
3261 "missing field{} {}{} in initializer of `{}`",
3262 if remaining_fields.len() == 1 {""} else {"s"},
3263 remaining_fields_names,
3264 truncated_fields_error,
3266 .span_label(span, &format!("missing {}{}",
3267 remaining_fields_names,
3268 truncated_fields_error))
// Recovery path for a struct literal whose path did not resolve to a
// usable variant: type-check each field initializer (and, per the lines
// elided from this listing, presumably the base expression of an
// `S { .., ..base }` form — TODO confirm against the full source) so
// that errors inside them are still reported, without producing a
// struct type for the literal itself.
3273 fn check_struct_fields_on_error(&self,
3274 fields: &'gcx [hir::Field],
3275 base_expr: &'gcx Option<P<hir::Expr>>) {
3276 for field in fields {
3277 self.check_expr(&field.expr);
// NOTE(review): the lines binding `base` from `base_expr` (3278-3280)
// are elided in this listing; only the resulting check is visible.
3281 self.check_expr(&base);
// Resolves the path of a struct/variant/union literal (or pattern) to a
// concrete `VariantDef` and its `Ty`. Returns `None` — after emitting
// E0071 — when the path resolves to something that is not a
// struct-like type. On success, also registers the obligations implied
// by the bounds on the type's parameters at this use site.
// NOTE(review): several interior lines (e.g. the `qpath` parameter and
// some match arms/braces) are elided from this listing.
3287 pub fn check_struct_path(&self,
3289 node_id: ast::NodeId)
3290 -> Option<(&'tcx ty::VariantDef, Ty<'tcx>)> {
3291 let path_span = match *qpath {
3292 hir::QPath::Resolved(_, ref path) => path.span,
3293 hir::QPath::TypeRelative(ref qself, _) => qself.span
3295 let (def, ty) = self.finish_resolving_struct_path(qpath, path_span, node_id);
// Map the resolved `Def` to (variant, defining ADT id, substs).
3296 let variant = match def {
// Resolution already failed elsewhere; just propagate the taint.
3298 self.set_tainted_by_errors();
3301 Def::Variant(..) => {
3303 ty::TyAdt(adt, substs) => {
3304 Some((adt.variant_of_def(def), adt.did, substs))
3306 _ => bug!("unexpected type: {:?}", ty.sty)
3309 Def::Struct(..) | Def::Union(..) | Def::TyAlias(..) |
3310 Def::AssociatedTy(..) | Def::SelfTy(..) => {
// Guard: enums reached through an alias/`Self` are not struct-like.
3312 ty::TyAdt(adt, substs) if !adt.is_enum() => {
3313 Some((adt.struct_variant(), adt.did, substs))
3318 _ => bug!("unexpected definition: {:?}", def)
3321 if let Some((variant, did, substs)) = variant {
3322 // Check bounds on type arguments used in the path.
3323 let bounds = self.instantiate_bounds(path_span, did, substs);
3324 let cause = traits::ObligationCause::new(path_span, self.body_id,
3325 traits::ItemObligation(did));
3326 self.add_obligations_for_parameters(cause, &bounds);
// Not a struct/variant/union: report E0071 at the path.
3330 struct_span_err!(self.tcx.sess, path_span, E0071,
3331 "expected struct, variant or union type, found {}",
3332 ty.sort_string(self.tcx))
3333 .span_label(path_span, &format!("not a struct"))
// Type-checks a struct literal expression `S { field: ..., ..base }`.
// Resolves the path to a variant, checks every field against the
// variant's declared field types, and — when a base expression is
// present — records the normalized field types for functional record
// update (FRU), which is only legal for plain structs (E0436 otherwise).
// NOTE(review): some parameter lines and closing braces are elided from
// this listing.
3339 fn check_expr_struct(&self,
3342 fields: &'gcx [hir::Field],
3343 base_expr: &'gcx Option<P<hir::Expr>>) -> Ty<'tcx>
3345 // Find the relevant variant
3346 let (variant, struct_ty) =
3347 if let Some(variant_ty) = self.check_struct_path(qpath, expr.id) {
// Path didn't resolve to a struct-like type: still check the
// field expressions so their own errors surface, then bail.
3350 self.check_struct_fields_on_error(fields, base_expr);
3351 return self.tcx.types.err;
3354 let path_span = match *qpath {
3355 hir::QPath::Resolved(_, ref path) => path.span,
3356 hir::QPath::TypeRelative(ref qself, _) => qself.span
// Completeness is only required when there is no `..base` tail.
3359 self.check_expr_struct_fields(struct_ty, expr.id, path_span, variant, fields,
3360 base_expr.is_none());
3361 if let &Some(ref base_expr) = base_expr {
// The FRU base must have exactly the literal's type.
3362 self.check_expr_has_type(base_expr, struct_ty);
3363 match struct_ty.sty {
3364 ty::TyAdt(adt, substs) if adt.is_struct() => {
// Record each field's (normalized) type so later passes can
// type-check the fields filled in from the base expression.
3365 self.tables.borrow_mut().fru_field_types.insert(
3367 adt.struct_variant().fields.iter().map(|f| {
3368 self.normalize_associated_types_in(
3369 expr.span, &f.ty(self.tcx, substs)
// FRU on enums/unions is rejected.
3375 span_err!(self.tcx.sess, base_expr.span, E0436,
3376 "functional record update syntax requires a struct");
3380 self.require_type_is_sized(struct_ty, expr.span, traits::StructInitializerSized);
3386 /// If an expression has any sub-expressions that result in a type error,
3387 /// inspecting that expression's type with `ty.references_error()` will return
3388 /// true. Likewise, if an expression is known to diverge, inspecting its
3389 /// type with `ty::type_is_bot` will return true (n.b.: since Rust is
3390 /// strict, _|_ can appear in the type of an expression that does not,
3391 /// itself, diverge: for example, fn() -> _|_.)
3392 /// Note that inspecting a type's structure *directly* may expose the fact
3393 /// that there are actually multiple representations for `TyError`, so avoid
3394 /// that when err needs to be handled differently.
// Central entry point for checking one expression: saves/restores the
// diverges/has_errors flags around the check, emits unreachable-code
// warnings, records the resulting type, and (per the elided lines near
// the end) presumably adds a NeverToAny adjustment for `!`-typed
// expressions — TODO confirm against the full source.
3395 fn check_expr_with_expectation_and_lvalue_pref(&self,
3396 expr: &'gcx hir::Expr,
3397 expected: Expectation<'tcx>,
3398 lvalue_pref: LvaluePreference) -> Ty<'tcx> {
3399 debug!(">> typechecking: expr={:?} expected={:?}",
3402 // Warn for expressions after diverging siblings.
3403 self.warn_if_unreachable(expr.id, expr.span, "expression");
3405 // Hide the outer diverging and has_errors flags.
3406 let old_diverges = self.diverges.get();
3407 let old_has_errors = self.has_errors.get();
3408 self.diverges.set(Diverges::Maybe);
3409 self.has_errors.set(false);
3411 let ty = self.check_expr_kind(expr, expected, lvalue_pref);
3413 // Warn for non-block expressions with diverging children.
// Block-like expressions are exempt: their interior already warned.
3416 hir::ExprLoop(..) | hir::ExprWhile(..) |
3417 hir::ExprIf(..) | hir::ExprMatch(..) => {}
3419 _ => self.warn_if_unreachable(expr.id, expr.span, "expression")
3422 // Record the type, which applies it effects.
3423 // We need to do this after the warning above, so that
3424 // we don't warn for the diverging expression itself.
3425 self.write_ty(expr.id, ty);
3427 // Combine the diverging and has_error flags.
3428 self.diverges.set(self.diverges.get() | old_diverges);
3429 self.has_errors.set(self.has_errors.get() | old_has_errors);
3431 debug!("type of {} is...", self.tcx.hir.node_to_string(expr.id));
3432 debug!("... {:?}, expected is {:?}", ty, expected);
3434 // Add adjustments to !-expressions
3436 if let Some(hir::map::NodeExpr(node_expr)) = self.tcx.hir.find(expr.id) {
3437 let adj_ty = self.next_diverging_ty_var(
3438 TypeVariableOrigin::AdjustmentType(node_expr.span));
3439 self.write_adjustment(expr.id, adjustment::Adjustment {
3440 kind: adjustment::Adjust::NeverToAny,
// The big dispatch over every `hir::Expr` kind: each match arm computes
// the expression's type, creating inference variables, registering
// obligations, and emitting diagnostics as needed. Many arm bodies,
// braces, and some match scrutinee lines are elided from this listing;
// the comments below annotate only what the visible lines establish.
3449 fn check_expr_kind(&self,
3450 expr: &'gcx hir::Expr,
3451 expected: Expectation<'tcx>,
3452 lvalue_pref: LvaluePreference) -> Ty<'tcx> {
// `box expr`: propagate the expected pointee as an rvalue hint.
3456 hir::ExprBox(ref subexpr) => {
3457 let expected_inner = expected.to_option(self).map_or(NoExpectation, |ty| {
3459 ty::TyBox(ty) => Expectation::rvalue_hint(self, ty),
3463 let referent_ty = self.check_expr_with_expectation(subexpr, expected_inner);
3464 tcx.mk_box(referent_ty)
3467 hir::ExprLit(ref lit) => {
3468 self.check_lit(&lit, expected)
3470 hir::ExprBinary(op, ref lhs, ref rhs) => {
3471 self.check_binop(expr, op, lhs, rhs)
3473 hir::ExprAssignOp(op, ref lhs, ref rhs) => {
3474 self.check_binop_assign(expr, op, lhs, rhs)
// Unary operators: `!`/`-` may be builtin or overloaded; `*` may be a
// builtin deref or an overloaded `Deref` call.
3476 hir::ExprUnary(unop, ref oprnd) => {
3477 let expected_inner = match unop {
3478 hir::UnNot | hir::UnNeg => {
// Only deref propagates the caller's lvalue preference.
3485 let lvalue_pref = match unop {
3486 hir::UnDeref => lvalue_pref,
3489 let mut oprnd_t = self.check_expr_with_expectation_and_lvalue_pref(&oprnd,
3493 if !oprnd_t.references_error() {
3496 oprnd_t = self.structurally_resolved_type(expr.span, oprnd_t);
3498 if let Some(mt) = oprnd_t.builtin_deref(true, NoPreference) {
3500 } else if let Some(method) = self.try_overloaded_deref(
3501 expr.span, Some(&oprnd), oprnd_t, lvalue_pref) {
3502 oprnd_t = self.make_overloaded_lvalue_return_type(method).ty;
3503 self.tables.borrow_mut().method_map.insert(MethodCall::expr(expr.id),
// Neither builtin nor overloaded deref applies: report and
// recover with the error type.
3506 self.type_error_message(expr.span, |actual| {
3507 format!("type `{}` cannot be \
3508 dereferenced", actual)
3510 oprnd_t = tcx.types.err;
3514 oprnd_t = self.structurally_resolved_type(oprnd.span,
3516 let result = self.check_user_unop("!", "not",
3517 tcx.lang_items.not_trait(),
3518 expr, &oprnd, oprnd_t, unop);
3519 // If it's builtin, we can reuse the type, this helps inference.
3520 if !(oprnd_t.is_integral() || oprnd_t.sty == ty::TyBool) {
3525 oprnd_t = self.structurally_resolved_type(oprnd.span,
3527 let result = self.check_user_unop("-", "neg",
3528 tcx.lang_items.neg_trait(),
3529 expr, &oprnd, oprnd_t, unop);
3530 // If it's builtin, we can reuse the type, this helps inference.
3531 if !(oprnd_t.is_integral() || oprnd_t.is_fp()) {
// `&expr` / `&mut expr`: hint the operand from the expected pointee
// and wrap the result in a fresh region variable.
3539 hir::ExprAddrOf(mutbl, ref oprnd) => {
3540 let hint = expected.only_has_type(self).map_or(NoExpectation, |ty| {
3542 ty::TyRef(_, ref mt) | ty::TyRawPtr(ref mt) => {
3543 if self.tcx.expr_is_lval(&oprnd) {
3544 // Lvalues may legitimately have unsized types.
3545 // For example, dereferences of a fat pointer and
3546 // the last field of a struct can be unsized.
3547 ExpectHasType(mt.ty)
3549 Expectation::rvalue_hint(self, mt.ty)
3555 let lvalue_pref = LvaluePreference::from_mutbl(mutbl);
3556 let ty = self.check_expr_with_expectation_and_lvalue_pref(&oprnd, hint, lvalue_pref);
3558 let tm = ty::TypeAndMut { ty: ty, mutbl: mutbl };
3559 if tm.ty.references_error() {
3562 // Note: at this point, we cannot say what the best lifetime
3563 // is to use for resulting pointer. We want to use the
3564 // shortest lifetime possible so as to avoid spurious borrowck
3565 // errors. Moreover, the longest lifetime will depend on the
3566 // precise details of the value whose address is being taken
3567 // (and how long it is valid), which we don't know yet until type
3568 // inference is complete.
3570 // Therefore, here we simply generate a region variable. The
3571 // region inferencer will then select the ultimate value.
3572 // Finally, borrowck is charged with guaranteeing that the
3573 // value whose address was taken can actually be made to live
3574 // as long as it needs to live.
3575 let region = self.next_region_var(infer::AddrOfRegion(expr.span));
3576 tcx.mk_ref(region, tm)
// Path expression: instantiate the referenced item's type (unless
// resolution already failed), then register WF bounds on its substs.
3579 hir::ExprPath(ref qpath) => {
3580 let (def, opt_ty, segments) = self.resolve_ty_and_def_ufcs(qpath,
3581 expr.id, expr.span);
3582 let ty = if def != Def::Err {
3583 self.instantiate_value_path(segments, opt_ty, def, expr.span, id)
3585 self.set_tainted_by_errors();
3589 // We always require that the type provided as the value for
3590 // a type parameter outlives the moment of instantiation.
3591 self.opt_node_ty_substs(expr.id, |item_substs| {
3592 self.add_wf_bounds(&item_substs.substs, expr);
3597 hir::ExprInlineAsm(_, ref outputs, ref inputs) => {
3598 for output in outputs {
3599 self.check_expr(output);
3601 for input in inputs {
3602 self.check_expr(input);
// `break` (optionally with a value): coerce the value into the
// enclosing loop's unified break type.
3606 hir::ExprBreak(label, ref expr_opt) => {
3607 let loop_id = label.map(|l| l.loop_id);
3609 let mut enclosing_loops = self.enclosing_loops.borrow_mut();
3610 enclosing_loops.find_loop(loop_id).map(|ctxt| ctxt.coerce_to)
3612 if let Some(coerce_to) = coerce_to {
3615 if let Some(ref e) = *expr_opt {
3616 // Recurse without `enclosing_loops` borrowed.
3617 e_ty = self.check_expr_with_hint(e, coerce_to);
3618 cause = self.misc(e.span);
3619 // Notably, the recursive call may alter coerce_to - must not keep using it!
3621 // `break` without argument acts like `break ()`.
3622 e_ty = tcx.mk_nil();
3623 cause = self.misc(expr.span);
3625 let mut enclosing_loops = self.enclosing_loops.borrow_mut();
3626 let ctxt = enclosing_loops.find_loop(loop_id).unwrap();
3628 let result = if let Some(ref e) = *expr_opt {
3629 // Special-case the first element, as it has no "previous expressions".
3630 let result = if !ctxt.may_break {
3631 self.try_coerce(e, e_ty, ctxt.coerce_to)
3633 self.try_find_coercion_lub(&cause, || ctxt.break_exprs.iter().cloned(),
3634 ctxt.unified, e, e_ty)
3637 ctxt.break_exprs.push(e);
3640 self.eq_types(true, &cause, e_ty, ctxt.unified)
3641 .map(|InferOk { obligations, .. }| {
3642 // FIXME(#32730) propagate obligations
3643 assert!(obligations.is_empty());
3648 Ok(ty) => ctxt.unified = ty,
3650 self.report_mismatched_types(&cause, ctxt.unified, e_ty, err).emit();
3654 ctxt.may_break = true;
3656 // Otherwise, we failed to find the enclosing loop; this can only happen if the
3657 // `break` was not inside a loop at all, which is caught by the loop-checking pass.
3660 hir::ExprAgain(_) => { tcx.types.never }
// `return`: E0572 outside a body; otherwise coerce the operand to the
// declared return type, or require `()` (E0069) when no operand.
3661 hir::ExprRet(ref expr_opt) => {
3662 if self.ret_ty.is_none() {
3663 struct_span_err!(self.tcx.sess, expr.span, E0572,
3664 "return statement outside of function body").emit();
3665 } else if let Some(ref e) = *expr_opt {
3666 self.check_expr_coercable_to_type(&e, self.ret_ty.unwrap());
3668 match self.eq_types(false,
3669 &self.misc(expr.span),
3670 self.ret_ty.unwrap(),
3672 Ok(ok) => self.register_infer_ok_obligations(ok),
3674 struct_span_err!(tcx.sess, expr.span, E0069,
3675 "`return;` in a function whose return type is not `()`")
3676 .span_label(expr.span, &format!("return type is not ()"))
// Assignment: LHS must be an lvalue (E0070) and Sized; RHS coerces to
// the LHS type.
3683 hir::ExprAssign(ref lhs, ref rhs) => {
3684 let lhs_ty = self.check_expr_with_lvalue_pref(&lhs, PreferMutLvalue);
3687 if !tcx.expr_is_lval(&lhs) {
3689 tcx.sess, expr.span, E0070,
3690 "invalid left-hand side expression")
3693 &format!("left-hand of expression not valid"))
3697 let rhs_ty = self.check_expr_coercable_to_type(&rhs, lhs_ty);
3699 self.require_type_is_sized(lhs_ty, lhs.span, traits::AssignmentLhsSized);
3701 if lhs_ty.references_error() || rhs_ty.references_error() {
3707 hir::ExprIf(ref cond, ref then_blk, ref opt_else_expr) => {
3708 self.check_then_else(&cond, &then_blk, opt_else_expr.as_ref().map(|e| &**e),
3709 expr.span, expected)
// `while`: break type is fixed to `()`; the body diverging tells us
// nothing since it may never run.
3711 hir::ExprWhile(ref cond, ref body, _) => {
3712 let unified = self.tcx.mk_nil();
3713 let coerce_to = unified;
3714 let ctxt = LoopCtxt {
3716 coerce_to: coerce_to,
3717 break_exprs: vec![],
3720 self.with_loop_ctxt(expr.id, ctxt, || {
3721 self.check_expr_has_type(&cond, tcx.types.bool);
3722 let cond_diverging = self.diverges.get();
3723 self.check_block_no_value(&body);
3725 // We may never reach the body so it diverging means nothing.
3726 self.diverges.set(cond_diverging);
3729 if self.has_errors.get() {
// `loop`: break type starts as a fresh inference variable, refined by
// the expected type and by `break`-with-value expressions.
3735 hir::ExprLoop(ref body, _, _) => {
3736 let unified = self.next_ty_var(TypeVariableOrigin::TypeInference(body.span));
3737 let coerce_to = expected.only_has_type(self).unwrap_or(unified);
3738 let ctxt = LoopCtxt {
3740 coerce_to: coerce_to,
3741 break_exprs: vec![],
3745 let ctxt = self.with_loop_ctxt(expr.id, ctxt, || {
3746 self.check_block_no_value(&body);
3749 // No way to know whether it's diverging because
3750 // of a `break` or an outer `break` or `return`.
3751 self.diverges.set(Diverges::Maybe);
3758 hir::ExprMatch(ref discrim, ref arms, match_src) => {
3759 self.check_match(expr, &discrim, arms, expected, match_src)
3761 hir::ExprClosure(capture, ref decl, body_id, _) => {
3762 self.check_expr_closure(expr, capture, &decl, body_id, expected)
3764 hir::ExprBlock(ref b) => {
3765 self.check_block_with_expected(&b, expected)
3767 hir::ExprCall(ref callee, ref args) => {
3768 self.check_call(expr, &callee, args, expected)
3770 hir::ExprMethodCall(name, ref tps, ref args) => {
3771 self.check_method_call(expr, name, args, &tps[..], expected, lvalue_pref)
// `e as T`: obvious errors are caught eagerly; the full cast check is
// deferred until type checking is otherwise complete.
3773 hir::ExprCast(ref e, ref t) => {
3774 // Find the type of `e`. Supply hints based on the type we are casting to,
3776 let t_cast = self.to_ty(t);
3777 let t_cast = self.resolve_type_vars_if_possible(&t_cast);
3778 let t_expr = self.check_expr_with_expectation(e, ExpectCastableToType(t_cast));
3779 let t_cast = self.resolve_type_vars_if_possible(&t_cast);
3781 // Eagerly check for some obvious errors.
3782 if t_expr.references_error() || t_cast.references_error() {
3785 // Defer other checks until we're done type checking.
3786 let mut deferred_cast_checks = self.deferred_cast_checks.borrow_mut();
3787 match cast::CastCheck::new(self, e, t_expr, t_cast, t.span, expr.span) {
3789 deferred_cast_checks.push(cast_check);
3792 Err(ErrorReported) => {
3798 hir::ExprType(ref e, ref t) => {
3799 let typ = self.to_ty(&t);
3800 self.check_expr_eq_type(&e, typ);
// Array literal `[a, b, c]`: unify element types via coerce/LUB, with
// the first element special-cased (no previous elements to LUB with).
3803 hir::ExprArray(ref args) => {
3804 let uty = expected.to_option(self).and_then(|uty| {
3806 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3811 let mut unified = self.next_ty_var(TypeVariableOrigin::TypeInference(expr.span));
3812 let coerce_to = uty.unwrap_or(unified);
3814 for (i, e) in args.iter().enumerate() {
3815 let e_ty = self.check_expr_with_hint(e, coerce_to);
3816 let cause = self.misc(e.span);
3818 // Special-case the first element, as it has no "previous expressions".
3819 let result = if i == 0 {
3820 self.try_coerce(e, e_ty, coerce_to)
3822 let prev_elems = || args[..i].iter().map(|e| &*e);
3823 self.try_find_coercion_lub(&cause, prev_elems, unified, e, e_ty)
3827 Ok(ty) => unified = ty,
3829 self.report_mismatched_types(&cause, unified, e_ty, e).emit();
3833 tcx.mk_array(unified, args.len())
// Repeat expression `[elem; count]`: the count is const-evaluated, and
// (per the visible lines below) the element must be `Copy` when the
// repeat count requires duplicating it.
3835 hir::ExprRepeat(ref element, count) => {
3836 let count = eval_length(self.tcx.global_tcx(), count, "repeat count")
3839 let uty = match expected {
3840 ExpectHasType(uty) => {
3842 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3849 let (element_ty, t) = match uty {
3851 self.check_expr_coercable_to_type(&element, uty);
3855 let t: Ty = self.next_ty_var(TypeVariableOrigin::MiscVariable(element.span));
3856 let element_ty = self.check_expr_has_type(&element, t);
3862 // For [foo, ..n] where n > 1, `foo` must have
3864 let lang_item = self.tcx.require_lang_item(lang_items::CopyTraitLangItem);
3865 self.require_type_meets(t, expr.span, traits::RepeatVec, lang_item);
3868 if element_ty.references_error() {
3871 tcx.mk_array(t, count)
// Tuple literal: each element is checked against the corresponding
// expected field type, if one is available.
3874 hir::ExprTup(ref elts) => {
3875 let flds = expected.only_has_type(self).and_then(|ty| {
3877 ty::TyTuple(ref flds) => Some(&flds[..]),
3882 let elt_ts_iter = elts.iter().enumerate().map(|(i, e)| {
3883 let t = match flds {
3884 Some(ref fs) if i < fs.len() => {
3886 self.check_expr_coercable_to_type(&e, ety);
3890 self.check_expr_with_expectation(&e, NoExpectation)
3895 let tuple = tcx.mk_tup(elt_ts_iter);
3896 if tuple.references_error() {
3902 hir::ExprStruct(ref qpath, ref fields, ref base_expr) => {
3903 self.check_expr_struct(expr, qpath, fields, base_expr)
3905 hir::ExprField(ref base, ref field) => {
3906 self.check_field(expr, lvalue_pref, &base, field)
3908 hir::ExprTupField(ref base, idx) => {
3909 self.check_tup_field(expr, lvalue_pref, &base, idx)
// Indexing `base[idx]`: resolve via builtin or `Index`/`IndexMut`; on
// failure, suggest tuple-indexing syntax when the base is a tuple.
3911 hir::ExprIndex(ref base, ref idx) => {
3912 let base_t = self.check_expr_with_lvalue_pref(&base, lvalue_pref);
3913 let idx_t = self.check_expr(&idx);
3915 if base_t.references_error() {
3917 } else if idx_t.references_error() {
3920 let base_t = self.structurally_resolved_type(expr.span, base_t);
3921 match self.lookup_indexing(expr, base, base_t, idx_t, lvalue_pref) {
3922 Some((index_ty, element_ty)) => {
3923 self.demand_eqtype(expr.span, index_ty, idx_t);
3927 self.check_expr_has_type(&idx, self.tcx.types.err);
3928 let mut err = self.type_error_struct(
3931 format!("cannot index a value of type `{}`",
3935 // Try to give some advice about indexing tuples.
3936 if let ty::TyTuple(_) = base_t.sty {
3937 let mut needs_note = true;
3938 // If the index is an integer, we can show the actual
3939 // fixed expression:
3940 if let hir::ExprLit(ref lit) = idx.node {
3941 if let ast::LitKind::Int(i,
3942 ast::LitIntType::Unsuffixed) = lit.node {
3943 let snip = tcx.sess.codemap().span_to_snippet(base.span);
3944 if let Ok(snip) = snip {
3945 err.span_suggestion(expr.span,
3946 "to access tuple elements, \
3947 use tuple indexing syntax \
3949 format!("{}.{}", snip, i));
3955 err.help("to access tuple elements, use tuple indexing \
3956 syntax (e.g. `tuple.0`)");
3968 // Finish resolving a path in a struct expression or pattern `S::A { .. }` if necessary.
3969 // The newly resolved definition is written into `type_relative_path_defs`.
// Returns the resolved `Def` together with the path's type. For
// type-relative paths (`T::A`), the associated path is resolved against
// the self type and the result is cached in the tables.
// NOTE(review): the `qpath` parameter line and some arm bodies are
// elided from this listing.
3970 fn finish_resolving_struct_path(&self,
3973 node_id: ast::NodeId)
3977 hir::QPath::Resolved(ref maybe_qself, ref path) => {
3978 let opt_self_ty = maybe_qself.as_ref().map(|qself| self.to_ty(qself));
3979 let ty = AstConv::def_to_ty(self, self, opt_self_ty, path, node_id, true);
3982 hir::QPath::TypeRelative(ref qself, ref segment) => {
3983 let ty = self.to_ty(qself);
// If the self type was itself written as a resolved path, reuse
// that path's resolution as the base definition.
3985 let def = if let hir::TyPath(hir::QPath::Resolved(_, ref path)) = qself.node {
3990 let (ty, def) = AstConv::associated_path_def_to_ty(self, node_id, path_span,
3993 // Write back the new resolution.
3994 self.tables.borrow_mut().type_relative_path_defs.insert(node_id, def);
4001 // Resolve associated value path into a base type and associated constant or method definition.
4002 // The newly resolved definition is written into `type_relative_path_defs`.
// Handles both fully-resolved paths and type-relative (`T::item`) ones;
// for the latter, method/const lookup runs here and errors are reported
// unless the item name is the dummy "invalid" keyword (error recovery).
4003 pub fn resolve_ty_and_def_ufcs<'b>(&self,
4004 qpath: &'b hir::QPath,
4005 node_id: ast::NodeId,
4007 -> (Def, Option<Ty<'tcx>>, &'b [hir::PathSegment])
4009 let (ty, item_segment) = match *qpath {
4010 hir::QPath::Resolved(ref opt_qself, ref path) => {
4012 opt_qself.as_ref().map(|qself| self.to_ty(qself)),
4013 &path.segments[..]);
4015 hir::QPath::TypeRelative(ref qself, ref segment) => {
4016 (self.to_ty(qself), segment)
4019 let item_name = item_segment.name;
4020 let def = match self.resolve_ufcs(span, item_name, ty, node_id) {
4023 let def = match error {
// Privacy errors still resolve to the (private) item so later
// passes can report the access properly.
4024 method::MethodError::PrivateMatch(def) => def,
4027 if item_name != keywords::Invalid.name() {
4028 self.report_method_error(span, ty, item_name, None, error, None);
4034 // Write back the new resolution.
4035 self.tables.borrow_mut().type_relative_path_defs.insert(node_id, def);
4036 (def, Some(ty), slice::ref_slice(&**item_segment))
// Type-checks the initializer expression of a `let` binding against the
// declared local type. Returns the initializer's type.
4039 pub fn check_decl_initializer(&self,
4040 local: &'gcx hir::Local,
4041 init: &'gcx hir::Expr) -> Ty<'tcx>
4043 let ref_bindings = local.pat.contains_ref_binding();
4045 let local_ty = self.local_ty(init.span, local.id);
4046 if let Some(m) = ref_bindings {
4047 // Somewhat subtle: if we have a `ref` binding in the pattern,
4048 // we want to avoid introducing coercions for the RHS. This is
4049 // both because it helps preserve sanity and, in the case of
4050 // ref mut, for soundness (issue #23116). In particular, in
4051 // the latter case, we need to be clear that the type of the
4052 // referent for the reference that results is *equal to* the
4053 // type of the lvalue it is referencing, and not some
4054 // supertype thereof.
4055 let init_ty = self.check_expr_with_lvalue_pref(init, LvaluePreference::from_mutbl(m));
4056 self.demand_eqtype(init.span, init_ty, local_ty);
// No `ref` bindings: ordinary coercion of the RHS is fine.
4059 self.check_expr_coercable_to_type(init, local_ty)
// Type-checks a `let` declaration: records the local's type, checks the
// initializer (if any) and the pattern, and downgrades the recorded
// type to the error type when either produced an error.
4063 pub fn check_decl_local(&self, local: &'gcx hir::Local) {
4064 let t = self.local_ty(local.span, local.id);
4065 self.write_ty(local.id, t);
4067 if let Some(ref init) = local.init {
4068 let init_ty = self.check_decl_initializer(local, &init);
4069 if init_ty.references_error() {
4070 self.write_ty(local.id, init_ty);
4074 self.check_pat(&local.pat, t);
4075 let pat_ty = self.node_ty(local.pat.id);
4076 if pat_ty.references_error() {
4077 self.write_ty(local.id, pat_ty);
// Type-checks a single statement, with the same save/restore dance on
// the diverges/has_errors flags as expression checking. The statement
// node gets the error type, a fresh diverging variable, or `()`
// depending on what happened inside it.
4081 pub fn check_stmt(&self, stmt: &'gcx hir::Stmt) {
4082 // Don't do all the complex logic below for DeclItem.
4084 hir::StmtDecl(ref decl, id) => {
4086 hir::DeclLocal(_) => {}
// Item declarations are checked elsewhere; bail early here.
4087 hir::DeclItem(_) => {
4093 hir::StmtExpr(..) | hir::StmtSemi(..) => {}
4096 self.warn_if_unreachable(stmt.node.id(), stmt.span, "statement");
4098 // Hide the outer diverging and has_errors flags.
4099 let old_diverges = self.diverges.get();
4100 let old_has_errors = self.has_errors.get();
4101 self.diverges.set(Diverges::Maybe);
4102 self.has_errors.set(false);
4104 let (node_id, span) = match stmt.node {
4105 hir::StmtDecl(ref decl, id) => {
4106 let span = match decl.node {
4107 hir::DeclLocal(ref l) => {
4108 self.check_decl_local(&l);
4111 hir::DeclItem(_) => {/* ignore for now */
4117 hir::StmtExpr(ref expr, id) => {
4118 // Check with expected type of ()
4119 self.check_expr_has_type(&expr, self.tcx.mk_nil());
4122 hir::StmtSemi(ref expr, id) => {
4123 self.check_expr(&expr);
4128 if self.has_errors.get() {
4129 self.write_error(node_id);
4130 } else if self.diverges.get().always() {
4131 self.write_ty(node_id, self.next_diverging_ty_var(
4132 TypeVariableOrigin::DivergingStmt(span)));
4134 self.write_nil(node_id);
4137 // Combine the diverging and has_error flags.
4138 self.diverges.set(self.diverges.get() | old_diverges);
4139 self.has_errors.set(self.has_errors.get() | old_has_errors);
// Checks a block in a statement-like position where no value is wanted:
// the block is checked with an expectation of `()` and its actual type
// must be a subtype of `()`.
4142 pub fn check_block_no_value(&self, blk: &'gcx hir::Block) {
4143 let unit = self.tcx.mk_nil();
4144 let ty = self.check_block_with_expected(blk, ExpectHasType(unit));
4145 self.demand_suptype(blk.span, unit, ty);
// Type-checks a block: each statement in order, then the optional tail
// expression against the expected type. Also handles the diverging-tail
// case, and produces the "consider removing this semicolon" help when a
// trailing `expr;` is the only thing standing between the block and the
// expected type. Unsafety state is pushed/popped around the body.
// NOTE(review): several interior lines and closing braces are elided
// from this listing.
4148 fn check_block_with_expected(&self,
4149 blk: &'gcx hir::Block,
4150 expected: Expectation<'tcx>) -> Ty<'tcx> {
// Enter this block's unsafety context, remembering the previous one.
4152 let mut fcx_ps = self.ps.borrow_mut();
4153 let unsafety_state = fcx_ps.recurse(blk);
4154 replace(&mut *fcx_ps, unsafety_state)
4157 for s in &blk.stmts {
4161 let mut ty = match blk.expr {
4162 Some(ref e) => self.check_expr_with_expectation(e, expected),
4163 None => self.tcx.mk_nil()
4166 if self.diverges.get().always() {
4167 if let ExpectHasType(ety) = expected {
4168 // Avoid forcing a type (only `!` for now) in unreachable code.
4169 // FIXME(aburka) do we need this special case? and should it be is_uninhabited?
4170 if !ety.is_never() {
4171 if let Some(ref e) = blk.expr {
4172 // Coerce the tail expression to the right type.
4173 self.demand_coerce(e, ty, ety);
// Diverging block: its own type is a fresh diverging variable.
4178 ty = self.next_diverging_ty_var(TypeVariableOrigin::DivergingBlockExpr(blk.span));
4179 } else if let ExpectHasType(ety) = expected {
4180 if let Some(ref e) = blk.expr {
4181 // Coerce the tail expression to the right type.
4182 self.demand_coerce(e, ty, ety);
4184 // We're not diverging and there's an expected type, which,
4185 // in case it's not `()`, could result in an error higher-up.
4186 // We have a chance to error here early and be more helpful.
4187 let cause = self.misc(blk.span);
4188 let trace = TypeTrace::types(&cause, false, ty, ety);
4189 match self.sub_types(false, &cause, ty, ety) {
4190 Ok(InferOk { obligations, .. }) => {
4191 // FIXME(#32730) propagate obligations
4192 assert!(obligations.is_empty());
4195 let mut err = self.report_and_explain_type_error(trace, &err);
4197 // Be helpful when the user wrote `{... expr;}` and
4198 // taking the `;` off is enough to fix the error.
4199 let mut extra_semi = None;
4200 if let Some(stmt) = blk.stmts.last() {
4201 if let hir::StmtSemi(ref e, _) = stmt.node {
4202 if self.can_sub_types(self.node_ty(e.id), ety).is_ok() {
4203 extra_semi = Some(stmt);
4207 if let Some(last_stmt) = extra_semi {
// Point at just the semicolon: a one-byte span at the end of
// the statement's span (macro-expanded spans normalized first).
4208 let original_span = original_sp(self.tcx.sess.codemap(),
4209 last_stmt.span, blk.span);
4210 let span_semi = Span {
4211 lo: original_span.hi - BytePos(1),
4212 hi: original_span.hi,
4213 expn_id: original_span.expn_id
4215 err.span_help(span_semi, "consider removing this semicolon:");
4223 // We already applied the type (and potentially errored),
4224 // use the expected type to avoid further errors out.
4228 if self.has_errors.get() || ty.references_error() {
4229 ty = self.tcx.types.err
4232 self.write_ty(blk.id, ty);
// Restore the enclosing unsafety context.
4234 *self.ps.borrow_mut() = prev;
4238 // Instantiates the given path, which must refer to an item with the given
4239 // number of type parameters and type.
4240 pub fn instantiate_value_path(&self,
4241 segments: &[hir::PathSegment],
4242 opt_self_ty: Option<Ty<'tcx>>,
4245 node_id: ast::NodeId)
4247 debug!("instantiate_value_path(path={:?}, def={:?}, node_id={})",
4252 // We need to extract the type parameters supplied by the user in
4253 // the path `path`. Due to the current setup, this is a bit of a
4254 // tricky-process; the problem is that resolve only tells us the
4255 // end-point of the path resolution, and not the intermediate steps.
4256 // Luckily, we can (at least for now) deduce the intermediate steps
4257 // just from the end-point.
4259 // There are basically four cases to consider:
4261 // 1. Reference to a constructor of enum variant or struct:
4263 // struct Foo<T>(...)
4264 // enum E<T> { Foo(...) }
4266 // In these cases, the parameters are declared in the type
4269 // 2. Reference to a fn item or a free constant:
4273 // In this case, the path will again always have the form
4274 // `a::b::foo::<T>` where only the final segment should have
4275 // type parameters. However, in this case, those parameters are
4276 // declared on a value, and hence are in the `FnSpace`.
4278 // 3. Reference to a method or an associated constant:
4280 // impl<A> SomeStruct<A> {
4284 // Here we can have a path like
4285 // `a::b::SomeStruct::<A>::foo::<B>`, in which case parameters
4286 // may appear in two places. The penultimate segment,
4287 // `SomeStruct::<A>`, contains parameters in TypeSpace, and the
4288 // final segment, `foo::<B>` contains parameters in fn space.
4290 // 4. Reference to a local variable
4292 // Local variables can't have any type parameters.
4294 // The first step then is to categorize the segments appropriately.
4296 assert!(!segments.is_empty());
4298 let mut ufcs_associated = None;
4299 let mut type_segment = None;
4300 let mut fn_segment = None;
4302 // Case 1. Reference to a struct/variant constructor.
4303 Def::StructCtor(def_id, ..) |
4304 Def::VariantCtor(def_id, ..) => {
4305 // Everything but the final segment should have no
4306 // parameters at all.
4307 let mut generics = self.tcx.item_generics(def_id);
4308 if let Some(def_id) = generics.parent {
4309 // Variant and struct constructors use the
4310 // generics of their parent type definition.
4311 generics = self.tcx.item_generics(def_id);
4313 type_segment = Some((segments.last().unwrap(), generics));
4316 // Case 2. Reference to a top-level value.
4318 Def::Const(def_id) |
4319 Def::Static(def_id, _) => {
4320 fn_segment = Some((segments.last().unwrap(),
4321 self.tcx.item_generics(def_id)));
4324 // Case 3. Reference to a method or associated const.
4325 Def::Method(def_id) |
4326 Def::AssociatedConst(def_id) => {
4327 let container = self.tcx.associated_item(def_id).container;
4329 ty::TraitContainer(trait_did) => {
4330 callee::check_legal_trait_for_method_call(self.ccx, span, trait_did)
4332 ty::ImplContainer(_) => {}
4335 let generics = self.tcx.item_generics(def_id);
4336 if segments.len() >= 2 {
4337 let parent_generics = self.tcx.item_generics(generics.parent.unwrap());
4338 type_segment = Some((&segments[segments.len() - 2], parent_generics));
4340 // `<T>::assoc` will end up here, and so can `T::assoc`.
4341 let self_ty = opt_self_ty.expect("UFCS sugared assoc missing Self");
4342 ufcs_associated = Some((container, self_ty));
4344 fn_segment = Some((segments.last().unwrap(), generics));
4347 // Case 4. Local variable, no generics.
4348 Def::Local(..) | Def::Upvar(..) => {}
4350 _ => bug!("unexpected definition: {:?}", def),
4353 debug!("type_segment={:?} fn_segment={:?}", type_segment, fn_segment);
4355 // Now that we have categorized what space the parameters for each
4356 // segment belong to, let's sort out the parameters that the user
4357 // provided (if any) into their appropriate spaces. We'll also report
4358 // errors if type parameters are provided in an inappropriate place.
4359 let poly_segments = type_segment.is_some() as usize +
4360 fn_segment.is_some() as usize;
4361 self.tcx.prohibit_type_params(&segments[..segments.len() - poly_segments]);
4364 Def::Local(def_id) | Def::Upvar(def_id, ..) => {
4365 let nid = self.tcx.hir.as_local_node_id(def_id).unwrap();
4366 let ty = self.local_ty(span, nid);
4367 let ty = self.normalize_associated_types_in(span, &ty);
4368 self.write_ty(node_id, ty);
4369 self.write_substs(node_id, ty::ItemSubsts {
4370 substs: self.tcx.intern_substs(&[])
4377 // Now we have to compare the types that the user *actually*
4378 // provided against the types that were *expected*. If the user
4379 // did not provide any types, then we want to substitute inference
4380 // variables. If the user provided some types, we may still need
4381 // to add defaults. If the user provided *too many* types, that's
4383 self.check_path_parameter_count(span, &mut type_segment);
4384 self.check_path_parameter_count(span, &mut fn_segment);
4386 let (fn_start, has_self) = match (type_segment, fn_segment) {
4387 (_, Some((_, generics))) => {
4388 (generics.parent_count(), generics.has_self)
4390 (Some((_, generics)), None) => {
4391 (generics.own_count(), generics.has_self)
4393 (None, None) => (0, false)
4395 let substs = Substs::for_item(self.tcx, def.def_id(), |def, _| {
4396 let mut i = def.index as usize;
4398 let segment = if i < fn_start {
4399 i -= has_self as usize;
4405 let lifetimes = match segment.map(|(s, _)| &s.parameters) {
4406 Some(&hir::AngleBracketedParameters(ref data)) => &data.lifetimes[..],
4407 Some(&hir::ParenthesizedParameters(_)) => bug!(),
4411 AstConv::opt_ast_region_to_region(self, self, span, lifetimes.get(i), Some(def))
4413 let mut i = def.index as usize;
4415 let segment = if i < fn_start {
4416 // Handle Self first, so we can adjust the index to match the AST.
4417 if has_self && i == 0 {
4418 return opt_self_ty.unwrap_or_else(|| {
4419 self.type_var_for_def(span, def, substs)
4422 i -= has_self as usize;
4428 let (types, infer_types) = match segment.map(|(s, _)| &s.parameters) {
4429 Some(&hir::AngleBracketedParameters(ref data)) => {
4430 (&data.types[..], data.infer_types)
4432 Some(&hir::ParenthesizedParameters(_)) => bug!(),
4433 None => (&[][..], true)
4436 // Skip over the lifetimes in the same segment.
4437 if let Some((_, generics)) = segment {
4438 i -= generics.regions.len();
4441 if let Some(ast_ty) = types.get(i) {
4442 // A provided type parameter.
4444 } else if let (false, Some(default)) = (infer_types, def.default) {
4445 // No type parameter provided, but a default exists.
4446 default.subst_spanned(self.tcx, substs, Some(span))
4448 // No type parameters were provided, we can infer all.
4449 // This can also be reached in some error cases:
4450 // We prefer to use inference variables instead of
4451 // TyError to let type inference recover somewhat.
4452 self.type_var_for_def(span, def, substs)
4456 // The things we are substituting into the type should not contain
4457 // escaping late-bound regions, and nor should the base type scheme.
4458 let ty = self.tcx.item_type(def.def_id());
4459 assert!(!substs.has_escaping_regions());
4460 assert!(!ty.has_escaping_regions());
4462 // Add all the obligations that are required, substituting and
4463 // normalized appropriately.
4464 let bounds = self.instantiate_bounds(span, def.def_id(), &substs);
4465 self.add_obligations_for_parameters(
4466 traits::ObligationCause::new(span, self.body_id, traits::ItemObligation(def.def_id())),
4469 // Substitute the values for the type parameters into the type of
4470 // the referenced item.
4471 let ty_substituted = self.instantiate_type_scheme(span, &substs, &ty);
4473 if let Some((ty::ImplContainer(impl_def_id), self_ty)) = ufcs_associated {
4474 // In the case of `Foo<T>::method` and `<Foo<T>>::method`, if `method`
4475 // is inherent, there is no `Self` parameter, instead, the impl needs
4476 // type parameters, which we can infer by unifying the provided `Self`
4477 // with the substituted impl type.
4478 let ty = self.tcx.item_type(impl_def_id);
4480 let impl_ty = self.instantiate_type_scheme(span, &substs, &ty);
4481 match self.sub_types(false, &self.misc(span), self_ty, impl_ty) {
4482 Ok(ok) => self.register_infer_ok_obligations(ok),
4485 "instantiate_value_path: (UFCS) {:?} was a subtype of {:?} but now is not?",
4492 debug!("instantiate_value_path: type of {:?} is {:?}",
4495 self.write_substs(node_id, ty::ItemSubsts {
4501 /// Report errors if the provided parameters are too few or too many.
/// Validates the lifetime parameters written in the path segment against
/// the definition's declared lifetimes (E0088 / E0090), the written type
/// parameters against the declared type parameters and their defaults
/// (E0087 / E0089), and rejects associated-type bindings, which are only
/// legal in type paths (E0182). When too many type parameters are given,
/// later substitution falls back to inference variables (see the in-body
/// comment) to avoid cascading derived errors.
4502 fn check_path_parameter_count(&self,
4504 segment: &mut Option<(&hir::PathSegment, &ty::Generics)>) {
// Destructure whatever the user actually wrote in this segment. A
// missing segment behaves as "nothing written, infer everything".
4505 let (lifetimes, types, infer_types, bindings) = {
4506 match segment.map(|(s, _)| &s.parameters) {
4507 Some(&hir::AngleBracketedParameters(ref data)) => {
4508 (&data.lifetimes[..], &data.types[..], data.infer_types, &data.bindings[..])
// `Fn(..) -> ..` sugar never appears in an expression path.
4510 Some(&hir::ParenthesizedParameters(_)) => {
4511 span_bug!(span, "parenthesized parameters cannot appear in ExprPath");
4513 None => (&[][..], &[][..], true, &[][..])
// Body of a small pluralizing helper: renders "N parameter(s)" for the
// diagnostics below (the helper's `fn` line is elided here).
4518 format!("{} parameter{}", n, if n == 1 { "" } else { "s" })
4521 // Check provided lifetime parameters.
4522 let lifetime_defs = segment.map_or(&[][..], |(_, generics)| &generics.regions);
// Explicit lifetimes are all-or-nothing: either none are written (all
// inferred) or exactly as many as the definition declares.
4523 if lifetimes.len() > lifetime_defs.len() {
4524 struct_span_err!(self.tcx.sess, span, E0088,
4525 "too many lifetime parameters provided: \
4526 expected {}, found {}",
4527 count(lifetime_defs.len()),
4528 count(lifetimes.len()))
4529 .span_label(span, &format!("unexpected lifetime parameter{}",
4530 match lifetimes.len() { 1 => "", _ => "s" }))
4532 } else if lifetimes.len() > 0 && lifetimes.len() < lifetime_defs.len() {
4533 struct_span_err!(self.tcx.sess, span, E0090,
4534 "too few lifetime parameters provided: \
4535 expected {}, found {}",
4536 count(lifetime_defs.len()),
4537 count(lifetimes.len()))
4538 .span_label(span, &format!("too few lifetime parameters"))
4542 // The case where there are not enough lifetime parameters is not checked,
4543 // because this is not possible - a function never takes lifetime parameters.
4544 // See discussion for Pull Request 36208.
4546 // Check provided type parameters.
// For a type segment (no parent generics) skip the implicit `Self`
// parameter: it is never written explicitly in the path.
4547 let type_defs = segment.map_or(&[][..], |(_, generics)| {
4548 if generics.parent.is_none() {
4549 &generics.types[generics.has_self as usize..]
// Parameters with defaults trail the required ones, so the required
// count is the length of the leading run without defaults.
4554 let required_len = type_defs.iter()
4555 .take_while(|d| d.default.is_none())
4557 if types.len() > type_defs.len() {
// Point the error at the first *extra* type parameter, not the path.
4558 let span = types[type_defs.len()].span;
4559 struct_span_err!(self.tcx.sess, span, E0087,
4560 "too many type parameters provided: \
4561 expected at most {}, found {}",
4562 count(type_defs.len()),
4564 .span_label(span, &format!("too many type parameters")).emit();
4566 // To prevent derived errors from accumulating due to extra
4567 // type parameters, we force instantiate_value_path to
4568 // use inference variables instead of the provided types.
4570 } else if !infer_types && types.len() < required_len {
4571 let adjust = |len| if len > 1 { "parameters" } else { "parameter" };
4572 let required_param_str = adjust(required_len);
4573 let actual_param_str = adjust(types.len());
4574 struct_span_err!(self.tcx.sess, span, E0089,
4575 "too few type parameters provided: \
4576 expected {} {}, found {} {}",
4577 count(required_len),
4581 .span_label(span, &format!("expected {} type {}", required_len, required_param_str))
// Associated-type bindings (`Item = T`) are only meaningful in type
// paths, never in an expression path.
4585 if !bindings.is_empty() {
4586 span_err!(self.tcx.sess, bindings[0].span, E0182,
4587 "unexpected binding of associated item in expression path \
4588 (only allowed in type paths)");
/// Resolve `ty` through pending unification obligations; if it remains an
/// unresolved inference variable, fall back to the type produced by `f`.
/// A fallback that is itself a variable (or references an error) means the
/// type genuinely cannot be known here: emit "the type of this value must
/// be known in this context" (unless prior errors were already reported)
/// and degrade the variable to `err`. Otherwise, unify `ty` with the
/// concrete fallback type.
4592 fn structurally_resolve_type_or_else<F>(&self, sp: Span, ty: Ty<'tcx>, f: F)
4594 where F: Fn() -> Ty<'tcx>
4596 let mut ty = self.resolve_type_vars_with_obligations(ty);
// NOTE(review): the guard on the elided line(s) just above presumably
// checks that `ty` is still an inference variable — confirm upstream.
// Ask the caller-supplied fallback for an alternative type.
4599 let alternative = f();
4602 if alternative.is_ty_var() || alternative.references_error() {
// Avoid cascading diagnostics when an error was already reported.
4603 if !self.is_tainted_by_errors() {
4604 self.type_error_message(sp, |_actual| {
4605 "the type of this value must be known in this context".to_string()
// Tie the variable to `err` so downstream checks don't re-report it.
4608 self.demand_suptype(sp, self.tcx.types.err, ty);
4609 ty = self.tcx.types.err;
// The fallback is usable: unify the unresolved variable with it.
4611 self.demand_suptype(sp, alternative, ty);
4619 // Resolves `typ` by a single level if `typ` is a type variable. If no
4620 // resolution is possible, then an error is reported.
// Thin wrapper over `structurally_resolve_type_or_else`; the fallback
// closure (its body is elided here) supplies the alternative type used
// when `ty` cannot be resolved.
4621 pub fn structurally_resolved_type(&self, sp: Span, ty: Ty<'tcx>) -> Ty<'tcx> {
4622 self.structurally_resolve_type_or_else(sp, ty, || {
/// Push `ctxt` as the innermost loop context for node `id`, run `f`, then
/// pop the context and return it (possibly mutated by `f`). While `f`
/// runs, the context's stack index is registered in `by_id` under `id` —
/// presumably so break/continue checking can look it up; confirm against
/// the callers of `enclosing_loops`.
4627 fn with_loop_ctxt<F: FnOnce()>(&self, id: ast::NodeId, ctxt: LoopCtxt<'gcx, 'tcx>, f: F)
4628 -> LoopCtxt<'gcx, 'tcx> {
// Register: remember the new context's stack index under `id`, push it.
4631 let mut enclosing_loops = self.enclosing_loops.borrow_mut();
4632 index = enclosing_loops.stack.len();
4633 enclosing_loops.by_id.insert(id, index);
4634 enclosing_loops.stack.push(ctxt);
// NOTE(review): the RefCell borrow is dropped and `f()` is invoked in
// the elided lines here; the re-borrow below depends on that.
// Unregister: the stack must be balanced around `f`.
4638 let mut enclosing_loops = self.enclosing_loops.borrow_mut();
4639 debug_assert!(enclosing_loops.stack.len() == index + 1);
4640 enclosing_loops.by_id.remove(&id).expect("missing loop context");
4641 (enclosing_loops.stack.pop().expect("missing loop context"))
4646 pub fn check_bounds_are_used<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
4647 generics: &hir::Generics,
4649 debug!("check_bounds_are_used(n_tps={}, ty={:?})",
4650 generics.ty_params.len(), ty);
4652 // make a vector of booleans initially false, set to true when used
4653 if generics.ty_params.is_empty() { return; }
4654 let mut tps_used = vec![false; generics.ty_params.len()];
4656 for leaf_ty in ty.walk() {
4657 if let ty::TyParam(ParamTy {idx, ..}) = leaf_ty.sty {
4658 debug!("Found use of ty param num {}", idx);
4659 tps_used[idx as usize - generics.lifetimes.len()] = true;
4663 for (&used, param) in tps_used.iter().zip(&generics.ty_params) {
4665 struct_span_err!(ccx.tcx.sess, param.span, E0091,
4666 "type parameter `{}` is unused",
4668 .span_label(param.span, &format!("unused type parameter"))