1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
15 Within the check phase of type check, we check each item one at a time
16 (bodies of function expressions are checked as part of the containing
17 function). Inference is used to supply types wherever they are unknown.
20 By far the most complex case is checking the body of a function. This
21 can be broken down into several distinct phases:
23 - gather: creates type variables to represent the type of each local
24 variable and pattern binding.
26 - main: the main pass does the lion's share of the work: it
27 determines the types of all expressions, resolves
28 methods, checks for most invalid conditions, and so forth. In
29 some cases, where a type is unknown, it may create a type or region
30 variable and use that as the type of an expression.
32 In the process of checking, various constraints will be placed on
33 these type variables through the subtyping relationships requested
34 through the `demand` module. The `infer` module is in charge
35 of resolving those constraints.
37 - regionck: after main is complete, the regionck pass goes over all
38 types looking for regions and making sure that they did not escape
39 into places they are not in scope. This may also influence the
40 final assignments of the various region variables if there is some flexibility.
43 - vtable: find and records the impls to use for each trait bound that
44 appears on a type parameter.
46 - writeback: writes the final types within a function body, replacing
47 type variables with their final inferred types. These final types
48 are written into the `tcx.node_types` table, which should *never* contain
49 any reference to a type variable.
53 While type checking a function, the intermediate types for the
54 expressions, blocks, and so forth contained within the function are
55 stored in `fcx.node_types` and `fcx.item_substs`. These types
56 may contain unresolved type variables. After type checking is
57 complete, the functions in the writeback module are used to take the
58 types from this table, resolve them, and then write them into their
59 permanent home in the type context `ccx.tcx`.
61 This means that during inferencing you should use `fcx.write_ty()`
62 and `fcx.expr_ty()` / `fcx.node_ty()` to write/obtain the types of
63 nodes within the function.
65 The types of top-level items, which never contain unbound type
66 variables, are stored directly into the `tcx` tables.
68 n.b.: A type variable is not the same thing as a type parameter. A
69 type variable is rather an "instance" of a type parameter: that is,
70 given a generic function `fn foo<T>(t: T)`: while checking the
71 function `foo`, the type `ty_param(0)` refers to the type `T`, which
72 is treated in abstract. When `foo()` is called, however, `T` will be
73 substituted for a fresh type variable `N`. This variable will
74 eventually be resolved to some concrete type (which might itself be
79 pub use self::Expectation::*;
80 pub use self::compare_method::{compare_impl_method, compare_const_impl};
81 use self::TupleArgumentsFlag::*;
83 use astconv::{AstConv, ast_region_to_region, PathParamMode};
84 use dep_graph::DepNode;
85 use fmt_macros::{Parser, Piece, Position};
86 use middle::cstore::LOCAL_CRATE;
87 use hir::def::{Def, PathResolution};
88 use hir::def_id::DefId;
90 use rustc::infer::{self, InferCtxt, InferOk, TypeOrigin, TypeTrace, type_variable};
91 use rustc::ty::subst::{Subst, Substs};
92 use rustc::traits::{self, Reveal};
93 use rustc::ty::{ParamTy, ParameterEnvironment};
94 use rustc::ty::{LvaluePreference, NoPreference, PreferMutLvalue};
95 use rustc::ty::{self, ToPolyTraitRef, Ty, TyCtxt, Visibility};
96 use rustc::ty::{MethodCall, MethodCallee};
97 use rustc::ty::adjustment;
98 use rustc::ty::fold::{BottomUpFolder, TypeFoldable};
99 use rustc::ty::util::{Representability, IntTypeExt};
100 use require_c_abi_if_variadic;
101 use rscope::{ElisionFailureInfo, RegionScope};
102 use session::{Session, CompileResult};
106 use util::common::{block_query, ErrorReported, indenter, loop_query};
107 use util::nodemap::{DefIdMap, FnvHashMap, FnvHashSet, NodeMap};
109 use std::cell::{Cell, Ref, RefCell};
110 use std::mem::replace;
112 use syntax::abi::Abi;
115 use syntax::codemap::{self, Spanned};
116 use syntax::feature_gate::{GateIssue, emit_feature_err};
117 use syntax::parse::token::{self, InternedString, keywords};
119 use syntax::util::lev_distance::find_best_match_for_name;
120 use syntax_pos::{self, Span};
121 use errors::DiagnosticBuilder;
123 use rustc::hir::intravisit::{self, Visitor};
124 use rustc::hir::{self, PatKind};
125 use rustc::hir::print as pprust;
126 use rustc_back::slice;
127 use rustc_const_eval::eval_length;
// NOTE(review): this excerpt has interior lines elided (the embedded original
// line numbers are non-contiguous); code is kept byte-identical, comments only.
//
// `Inherited` bundles the per-function state that an outer fn item and the
147 /// closures defined within the function. For example:
150 /// bar(move|| { ... })
153 /// Here, the function `foo()` and the closure passed to
154 /// `bar()` will each have their own `FnCtxt`, but they will
155 /// share the inherited fields.
156 pub struct Inherited<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
157 ccx: &'a CrateCtxt<'a, 'gcx>,
// The inference context used for all type/region variables in this body.
158 infcx: InferCtxt<'a, 'gcx, 'tcx>,
// Maps the NodeId of each local/pattern binding to its (possibly still
// unresolved) type; populated by GatherLocalsVisitor below.
159 locals: RefCell<NodeMap<Ty<'tcx>>>,
// Pending trait obligations registered during checking of this body.
161 fulfillment_cx: RefCell<traits::FulfillmentContext<'tcx>>,
163 // When we process a call like `c()` where `c` is a closure type,
164 // we may not have decided yet whether `c` is a `Fn`, `FnMut`, or
165 // `FnOnce` closure. In that case, we defer full resolution of the
166 // call until upvar inference can kick in and make the
167 // decision. We keep these deferred resolutions grouped by the
168 // def-id of the closure, so that once we decide, we can easily go
169 // back and process them.
170 deferred_call_resolutions: RefCell<DefIdMap<Vec<DeferredCallResolutionHandler<'gcx, 'tcx>>>>,
// Casts whose validity can only be judged once types are fully inferred.
172 deferred_cast_checks: RefCell<Vec<cast::CastCheck<'tcx>>>,
174 // Anonymized types found in explicit return types and their
175 // associated fresh inference variable. Writeback resolves these
176 // variables to get the concrete type, which can be used to
177 // deanonymize TyAnon, after typeck is done with all functions.
178 anon_types: RefCell<DefIdMap<Ty<'tcx>>>,
180 // Obligations which will have to be checked at the end of
181 // type-checking, after all functions have been inferred.
182 deferred_obligations: RefCell<Vec<traits::DeferredObligation<'tcx>>>,
// Auto-deref `Inherited` to its inference context so callers can invoke
// `InferCtxt` methods directly on an `Inherited` value.
// NOTE(review): the `deref` body and closing braces are elided in this excerpt.
185 impl<'a, 'gcx, 'tcx> Deref for Inherited<'a, 'gcx, 'tcx> {
186 type Target = InferCtxt<'a, 'gcx, 'tcx>;
187 fn deref(&self) -> &Self::Target {
// A deferred closure-call resolution: invoked once upvar inference has
// decided the closure kind (see the `deferred_call_resolutions` field above).
192 trait DeferredCallResolution<'gcx, 'tcx> {
193 fn resolve<'a>(&mut self, fcx: &FnCtxt<'a, 'gcx, 'tcx>);
// Boxed trait object stored per closure def-id in `deferred_call_resolutions`.
196 type DeferredCallResolutionHandler<'gcx, 'tcx> = Box<DeferredCallResolution<'gcx, 'tcx>+'tcx>;
198 /// When type-checking an expression, we propagate downward
199 /// whatever type hint we are able in the form of an `Expectation`.
200 #[derive(Copy, Clone, Debug)]
201 pub enum Expectation<'tcx> {
202 /// We know nothing about what type this expression should have.
// NOTE(review): the `NoExpectation` variant declaration is elided in this
// excerpt; it is matched by name in `resolve`/`to_option` below.
205 /// This expression should have the type given (or some subtype)
206 ExpectHasType(Ty<'tcx>),
208 /// This expression will be cast to the `Ty`
209 ExpectCastableToType(Ty<'tcx>),
211 /// This rvalue expression will be wrapped in `&` or `Box` and coerced
212 /// to `&Ty` or `Box<Ty>`, respectively. `Ty` is `[A]` or `Trait`.
213 ExpectRvalueLikeUnsized(Ty<'tcx>),
216 impl<'a, 'gcx, 'tcx> Expectation<'tcx> {
217 // Disregard "castable to" expectations because they
218 // can lead us astray. Consider for example `if cond
219 // {22} else {c} as u8` -- if we propagate the
220 // "castable to u8" constraint to 22, it will pick the
221 // type 22u8, which is overly constrained (c might not
222 // be a u8). In effect, the problem is that the
223 // "castable to" expectation is not the tightest thing
224 // we can say, so we want to drop it in this case.
225 // The tightest thing we can say is "must unify with
226 // else branch". Note that in the case of a "has type"
227 // constraint, this limitation does not hold.
229 // If the expected type is just a type variable, then don't use
230 // an expected type. Otherwise, we might write parts of the type
231 // when checking the 'then' block which are incompatible with the
// NOTE(review): some match arms of this function are elided in this excerpt.
233 fn adjust_for_branches(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Expectation<'tcx> {
235 ExpectHasType(ety) => {
236 let ety = fcx.shallow_resolve(ety);
237 if !ety.is_ty_var() {
243 ExpectRvalueLikeUnsized(ety) => {
244 ExpectRvalueLikeUnsized(ety)
250 /// Provide an expectation for an rvalue expression given an *optional*
251 /// hint, which is not required for type safety (the resulting type might
252 /// be checked higher up, as is the case with `&expr` and `box expr`), but
253 /// is useful in determining the concrete type.
255 /// The primary use case is where the expected type is a fat pointer,
256 /// like `&[isize]`. For example, consider the following statement:
258 /// let x: &[isize] = &[1, 2, 3];
260 /// In this case, the expected type for the `&[1, 2, 3]` expression is
261 /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
262 /// expectation `ExpectHasType([isize])`, that would be too strong --
263 /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
264 /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
265 /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
266 /// which still is useful, because it informs integer literals and the like.
267 /// See the test case `test/run-pass/coerce-expect-unsized.rs` and #20169
268 /// for examples of where this comes up.
269 fn rvalue_hint(fcx: &FnCtxt<'a, 'gcx, 'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> {
270 match fcx.tcx.struct_tail(ty).sty {
271 ty::TySlice(_) | ty::TyStr | ty::TyTrait(..) => {
272 ExpectRvalueLikeUnsized(ty)
274 _ => ExpectHasType(ty)
278 // Resolves `expected` by a single level if it is a variable. If
279 // there is no expected type or resolution is not possible (e.g.,
280 // no constraints yet present), just returns `None`.
281 fn resolve(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Expectation<'tcx> {
286 ExpectCastableToType(t) => {
287 ExpectCastableToType(fcx.resolve_type_vars_if_possible(&t))
289 ExpectHasType(t) => {
290 ExpectHasType(fcx.resolve_type_vars_if_possible(&t))
292 ExpectRvalueLikeUnsized(t) => {
293 ExpectRvalueLikeUnsized(fcx.resolve_type_vars_if_possible(&t))
// Resolves `self` and returns the carried hint type, if any kind of hint
// is present.
298 fn to_option(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
299 match self.resolve(fcx) {
300 NoExpectation => None,
301 ExpectCastableToType(ty) |
303 ExpectRvalueLikeUnsized(ty) => Some(ty),
// Like `to_option`, but only yields a type for the strict
// `ExpectHasType` case; weaker hints are discarded.
307 fn only_has_type(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
308 match self.resolve(fcx) {
309 ExpectHasType(ty) => Some(ty),
// Tracks whether the code currently being checked is inside an `unsafe`
// context, and how that context was entered (see `recurse` below).
// NOTE(review): a `from_fn` field (constructed in `function` below) appears
// to be elided from this excerpt.
315 #[derive(Copy, Clone)]
316 pub struct UnsafetyState {
317 pub def: ast::NodeId,
318 pub unsafety: hir::Unsafety,
319 pub unsafe_push_count: u32,
/// Creates the root `UnsafetyState` for a function body, taking the
/// function's own declared unsafety; `from_fn: true` records that the
/// unsafety came from the fn itself rather than an inner block.
324 pub fn function(unsafety: hir::Unsafety, def: ast::NodeId) -> UnsafetyState {
325 UnsafetyState { def: def, unsafety: unsafety, unsafe_push_count: 0, from_fn: true }
/// Computes the `UnsafetyState` for a block nested inside `self`,
/// adjusting the push/pop counter for compiler-internal
/// `push_unsafe!`/`pop_unsafe!` blocks. The `checked_add`/`checked_sub`
/// unwraps enforce that pushes and pops stay balanced (a mismatch is a
/// compiler bug, so panicking is intended).
328 pub fn recurse(&mut self, blk: &hir::Block) -> UnsafetyState {
329 match self.unsafety {
330 // If this unsafe, then if the outer function was already marked as
331 // unsafe we shouldn't attribute the unsafe'ness to the block. This
332 // way the block can be warned about instead of ignoring this
333 // extraneous block (functions are never warned about).
334 hir::Unsafety::Unsafe if self.from_fn => *self,
337 let (unsafety, def, count) = match blk.rules {
338 hir::PushUnsafeBlock(..) =>
339 (unsafety, blk.id, self.unsafe_push_count.checked_add(1).unwrap()),
340 hir::PopUnsafeBlock(..) =>
341 (unsafety, blk.id, self.unsafe_push_count.checked_sub(1).unwrap()),
342 hir::UnsafeBlock(..) =>
343 (hir::Unsafety::Unsafe, blk.id, self.unsafe_push_count),
344 hir::DefaultBlock | hir::PushUnstableBlock | hir:: PopUnstableBlock =>
345 (unsafety, self.def, self.unsafe_push_count),
347 UnsafetyState{ def: def,
349 unsafe_push_count: count,
// Per-function type-checking context: one per fn item and one per closure,
// all sharing an `Inherited` (see `inh` below and the `Deref` impl).
357 pub struct FnCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
// Cache of HIR-type -> semantic-type conversions for this body.
358 ast_ty_to_ty_cache: RefCell<NodeMap<Ty<'tcx>>>,
// NodeId of the body being checked.
360 body_id: ast::NodeId,
362 // This flag is set to true if, during the writeback phase, we encounter
363 // a type error in this function.
364 writeback_errors: Cell<bool>,
366 // Number of errors that had been reported when we started
367 // checking this function. On exit, if we find that *more* errors
368 // have been reported, we will skip regionck and other work that
369 // expects the types within the function to be consistent.
370 err_count_on_creation: usize,
// Current unsafety state (see `UnsafetyState` above).
374 ps: RefCell<UnsafetyState>,
// Shared state inherited from the enclosing function, if any.
376 inh: &'a Inherited<'a, 'gcx, 'tcx>,
// Auto-deref `FnCtxt` to its shared `Inherited` (which in turn derefs to the
// `InferCtxt`), so inference methods are callable directly on a `FnCtxt`.
// NOTE(review): the `deref` body and closing braces are elided in this excerpt.
379 impl<'a, 'gcx, 'tcx> Deref for FnCtxt<'a, 'gcx, 'tcx> {
380 type Target = Inherited<'a, 'gcx, 'tcx>;
381 fn deref(&self) -> &Self::Target {
386 /// Helper type of a temporary returned by ccx.inherited(...).
387 /// Necessary because we can't write the following bound:
388 /// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(Inherited<'b, 'gcx, 'tcx>).
389 pub struct InheritedBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
390 ccx: &'a CrateCtxt<'a, 'gcx>,
// Deferred inference-context construction; materialized in `enter` below.
391 infcx: infer::InferCtxtBuilder<'a, 'gcx, 'tcx>
// Entry point for building the shared `Inherited` state for the item `id`:
// computes the item's parameter environment and prepares an inference
// context builder over empty typeck tables.
394 impl<'a, 'gcx, 'tcx> CrateCtxt<'a, 'gcx> {
395 pub fn inherited(&'a self, id: ast::NodeId)
396 -> InheritedBuilder<'a, 'gcx, 'tcx> {
397 let param_env = ParameterEnvironment::for_item(self.tcx, id);
400 infcx: self.tcx.infer_ctxt(Some(ty::Tables::empty()),
402 Reveal::NotSpecializable)
// Enters the inference context and runs `f` with a fully-constructed
// `Inherited`, initializing every deferred-work table to empty.
407 impl<'a, 'gcx, 'tcx> InheritedBuilder<'a, 'gcx, 'tcx> {
408 fn enter<F, R>(&'tcx mut self, f: F) -> R
409 where F: for<'b> FnOnce(Inherited<'b, 'gcx, 'tcx>) -> R
412 self.infcx.enter(|infcx| {
416 fulfillment_cx: RefCell::new(traits::FulfillmentContext::new()),
417 locals: RefCell::new(NodeMap()),
418 deferred_call_resolutions: RefCell::new(DefIdMap()),
419 deferred_cast_checks: RefCell::new(Vec::new()),
420 anon_types: RefCell::new(DefIdMap()),
421 deferred_obligations: RefCell::new(Vec::new()),
// Normalizes associated-type projections inside `value` (any TypeFoldable),
// registering any obligations that arise in this body's fulfillment context.
427 impl<'a, 'gcx, 'tcx> Inherited<'a, 'gcx, 'tcx> {
428 fn normalize_associated_types_in<T>(&self,
430 body_id: ast::NodeId,
433 where T : TypeFoldable<'tcx>
435 assoc::normalize_associated_types_in(self,
436 &mut self.fulfillment_cx.borrow_mut(),
// Thin HIR visitors driving the two item-checking passes below:
// `CheckItemTypesVisitor` checks item signatures/types, and
// `CheckItemBodiesVisitor` checks function/const bodies.
444 struct CheckItemTypesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
445 struct CheckItemBodiesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
// Walks every item, checking its type-level well-formedness; also checks
// that fixed-length array sizes appearing in types are `usize` constants.
447 impl<'a, 'tcx> Visitor<'tcx> for CheckItemTypesVisitor<'a, 'tcx> {
448 fn visit_item(&mut self, i: &'tcx hir::Item) {
449 check_item_type(self.ccx, i);
450 intravisit::walk_item(self, i);
453 fn visit_ty(&mut self, t: &'tcx hir::Ty) {
// `[T; expr]` -- the length expression must be a constant of type usize.
455 hir::TyFixedLengthVec(_, ref expr) => {
456 check_const_with_type(self.ccx, &expr, self.ccx.tcx.types.usize, expr.id);
461 intravisit::walk_ty(self, t);
// Walks every item and type-checks its body (fn bodies, consts, etc.).
465 impl<'a, 'tcx> Visitor<'tcx> for CheckItemBodiesVisitor<'a, 'tcx> {
466 fn visit_item(&mut self, i: &'tcx hir::Item) {
467 check_item_body(self.ccx, i);
// Runs the (newer) well-formedness checking pass over every item in the
// crate, reporting accumulated errors via `track_errors`.
471 pub fn check_wf_new(ccx: &CrateCtxt) -> CompileResult {
472 ccx.tcx.sess.track_errors(|| {
473 let mut visit = wfcheck::CheckTypeWellFormedVisitor::new(ccx);
474 ccx.tcx.visit_all_items_in_krate(DepNode::WfCheck, &mut visit);
// Pass 1: check the types/signatures of all items in the crate
// (see `CheckItemTypesVisitor` above).
478 pub fn check_item_types(ccx: &CrateCtxt) -> CompileResult {
479 ccx.tcx.sess.track_errors(|| {
480 let mut visit = CheckItemTypesVisitor { ccx: ccx };
481 ccx.tcx.visit_all_items_in_krate(DepNode::TypeckItemType, &mut visit);
// Pass 2: type-check every item body, then drain the crate-wide
// `deferred_obligations` queue (obligations that could only be judged once
// all bodies were inferred), reporting hard errors and RFC 1592 warnings.
485 pub fn check_item_bodies(ccx: &CrateCtxt) -> CompileResult {
486 ccx.tcx.sess.track_errors(|| {
487 let mut visit = CheckItemBodiesVisitor { ccx: ccx };
488 ccx.tcx.visit_all_items_in_krate(DepNode::TypeckItemBody, &mut visit);
490 // Process deferred obligations, now that all functions
491 // bodies have been fully inferred.
492 for (&item_id, obligations) in ccx.deferred_obligations.borrow().iter() {
493 // Use the same DepNode as for the body of the original function/item.
494 let def_id = ccx.tcx.map.local_def_id(item_id);
495 let _task = ccx.tcx.dep_graph.in_task(DepNode::TypeckItemBody(def_id));
// Re-create the item's param env and a fresh inference context just to
// select these leftover obligations.
497 let param_env = ParameterEnvironment::for_item(ccx.tcx, item_id);
498 ccx.tcx.infer_ctxt(None, Some(param_env),
499 Reveal::NotSpecializable).enter(|infcx| {
500 let mut fulfillment_cx = traits::FulfillmentContext::new();
501 for obligation in obligations.iter().map(|o| o.to_obligation()) {
502 fulfillment_cx.register_predicate_obligation(&infcx, obligation);
505 if let Err(errors) = fulfillment_cx.select_all_or_error(&infcx) {
506 infcx.report_fulfillment_errors(&errors);
// RFC 1592 violations are (for now) only warnings, not errors.
509 if let Err(errors) = fulfillment_cx.select_rfc1592_obligations(&infcx) {
510 infcx.report_fulfillment_errors_as_warnings(&errors, item_id);
// Checks every local `Drop` impl in the crate for dropck validity.
// If the crate has no `Drop` lang item (e.g. no_core), there is nothing to do.
517 pub fn check_drop_impls(ccx: &CrateCtxt) -> CompileResult {
518 ccx.tcx.sess.track_errors(|| {
519 let _task = ccx.tcx.dep_graph.in_task(DepNode::Dropck);
520 let drop_trait = match ccx.tcx.lang_items.drop_trait() {
521 Some(id) => ccx.tcx.lookup_trait_def(id), None => { return }
523 drop_trait.for_each_impl(ccx.tcx, |drop_impl_did| {
524 let _task = ccx.tcx.dep_graph.in_task(DepNode::DropckImpl(drop_impl_did));
// Only impls defined in this crate are checked here.
525 if drop_impl_did.is_local() {
526 match dropck::check_drop_impl(ccx, drop_impl_did) {
// A dropck failure must already have emitted a diagnostic.
529 assert!(ccx.tcx.sess.has_errors());
// Type-checks one free function, method, or trait-default body: looks up the
// declared fn type, instantiates it for this body's call-site scope, runs
// `check_fn`, and then the post-passes (closure analysis, obligation
// selection, regionck, writeback) in their required order.
537 fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
538 decl: &'tcx hir::FnDecl,
539 body: &'tcx hir::Block,
540 fn_id: ast::NodeId) {
541 let raw_fty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(fn_id)).ty;
542 let fn_ty = match raw_fty.sty {
543 ty::TyFnDef(_, _, f) => f,
544 _ => span_bug!(body.span, "check_bare_fn: function type expected")
547 ccx.inherited(fn_id).enter(|inh| {
548 // Compute the fty from point of view of inside fn.
549 let fn_scope = inh.tcx.region_maps.call_site_extent(fn_id, body.id);
551 fn_ty.sig.subst(inh.tcx, &inh.parameter_environment.free_substs);
// Replace late-bound regions with free regions scoped to this body.
553 inh.tcx.liberate_late_bound_regions(fn_scope, &fn_sig);
555 inh.normalize_associated_types_in(body.span, body.id, &fn_sig);
557 let fcx = check_fn(&inh, fn_ty.unsafety, fn_id, &fn_sig, decl, fn_id, body);
559 fcx.select_all_obligations_and_apply_defaults();
560 fcx.closure_analyze_fn(body);
561 fcx.select_obligations_where_possible();
563 fcx.select_all_obligations_or_error(); // Casts can introduce new obligations.
565 fcx.regionck_fn(fn_id, decl, body);
// Writeback: resolve remaining type variables into the permanent tables.
566 fcx.resolve_type_vars_in_fn(decl, body, fn_id);
// The "gather" phase visitor (see module docs): walks a function body and
// allocates a type (declared or fresh inference variable) for every local
// and pattern binding, recording it in `fcx.locals`.
570 struct GatherLocalsVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
571 fcx: &'a FnCtxt<'a, 'gcx, 'tcx>
// Records the type of binding `nid`: the user-specified type when
// `ty_opt` is `Some`, otherwise a fresh inference variable. Returns the
// type that was recorded.
574 impl<'a, 'gcx, 'tcx> GatherLocalsVisitor<'a, 'gcx, 'tcx> {
575 fn assign(&mut self, _span: Span, nid: ast::NodeId, ty_opt: Option<Ty<'tcx>>) -> Ty<'tcx> {
578 // infer the variable's type
579 let var_ty = self.fcx.next_ty_var();
580 self.fcx.locals.borrow_mut().insert(nid, var_ty);
584 // take type that the user specified
585 self.fcx.locals.borrow_mut().insert(nid, typ);
// Visitor driving the gather phase: assigns types to locals and pattern
// bindings, and records types for expressions embedded in array-length
// positions. Deliberately does NOT descend into nested closure bodies.
592 impl<'a, 'gcx, 'tcx> Visitor<'gcx> for GatherLocalsVisitor<'a, 'gcx, 'tcx> {
593 // Add explicitly-declared locals.
594 fn visit_local(&mut self, local: &'gcx hir::Local) {
595 let o_ty = match local.ty {
596 Some(ref ty) => Some(self.fcx.to_ty(&ty)),
599 self.assign(local.span, local.id, o_ty);
600 debug!("Local variable {:?} is assigned type {}",
602 self.fcx.ty_to_string(
603 self.fcx.locals.borrow().get(&local.id).unwrap().clone()));
604 intravisit::walk_local(self, local);
607 // Add pattern bindings.
608 fn visit_pat(&mut self, p: &'gcx hir::Pat) {
609 if let PatKind::Binding(_, ref path1, _) = p.node {
// Bindings always start with a fresh inference variable; any declared
// type was already handled on the enclosing `Local`.
610 let var_ty = self.assign(p.span, p.id, None);
612 self.fcx.require_type_is_sized(var_ty, p.span,
613 traits::VariableType(p.id));
615 debug!("Pattern binding {} is assigned to {} with type {:?}",
617 self.fcx.ty_to_string(
618 self.fcx.locals.borrow().get(&p.id).unwrap().clone()),
621 intravisit::walk_pat(self, p);
624 fn visit_block(&mut self, b: &'gcx hir::Block) {
625 // non-obvious: the `blk` variable maps to region lb, so
626 // we have to keep this up-to-date. This
627 // is... unfortunate. It'd be nice to not need this.
628 intravisit::walk_block(self, b);
631 // Since an expr occurs as part of the type fixed size arrays we
632 // need to record the type for that node
633 fn visit_ty(&mut self, t: &'gcx hir::Ty) {
635 hir::TyFixedLengthVec(ref ty, ref count_expr) => {
// Array lengths are expressions of type usize.
637 self.fcx.check_expr_with_hint(&count_expr, self.fcx.tcx.types.usize);
639 hir::TyBareFn(ref function_declaration) => {
640 intravisit::walk_fn_decl_nopat(self, &function_declaration.decl);
641 walk_list!(self, visit_lifetime_def, &function_declaration.lifetimes);
643 _ => intravisit::walk_ty(self, t)
647 // Don't descend into the bodies of nested closures
648 fn visit_fn(&mut self, _: intravisit::FnKind<'gcx>, _: &'gcx hir::FnDecl,
649 _: &'gcx hir::Block, _: Span, _: ast::NodeId) { }
652 /// Helper used by check_bare_fn and check_expr_fn. Does the grungy work of checking a function
653 /// body and returns the function context used for that purpose, since in the case of a fn item
654 /// there is still a bit more to do.
657 /// * inherited: other fields inherited from the enclosing fn (if any)
658 fn check_fn<'a, 'gcx, 'tcx>(inherited: &'a Inherited<'a, 'gcx, 'tcx>,
659 unsafety: hir::Unsafety,
660 unsafety_id: ast::NodeId,
661 fn_sig: &ty::FnSig<'tcx>,
662 decl: &'gcx hir::FnDecl,
664 body: &'gcx hir::Block)
665 -> FnCtxt<'a, 'gcx, 'tcx>
667 let mut fn_sig = fn_sig.clone();
669 debug!("check_fn(sig={:?}, fn_id={})", fn_sig, fn_id);
671 // Create the function context. This is either derived from scratch or,
672 // in the case of function expressions, based on the outer context.
673 let mut fcx = FnCtxt::new(inherited, fn_sig.output, body.id);
674 *fcx.ps.borrow_mut() = UnsafetyState::function(unsafety, unsafety_id);
// The return type must be sized, and `impl Trait`/anon types in it are
// replaced by fresh inference variables (resolved later by writeback).
676 fcx.require_type_is_sized(fcx.ret_ty, decl.output.span(), traits::ReturnType);
677 fcx.ret_ty = fcx.instantiate_anon_types(&fcx.ret_ty);
678 fn_sig.output = fcx.ret_ty;
681 let mut visit = GatherLocalsVisitor { fcx: &fcx, };
683 // Add formal parameters.
684 for (arg_ty, input) in fn_sig.inputs.iter().zip(&decl.inputs) {
685 // The type of the argument must be well-formed.
687 // NB -- this is now checked in wfcheck, but that
688 // currently only results in warnings, so we issue an
689 // old-style WF obligation here so that we still get the
690 // errors that we used to get.
691 fcx.register_old_wf_obligation(arg_ty, input.ty.span, traits::MiscObligation);
693 // Create type variables for each argument.
694 pat_util::pat_bindings(&input.pat, |_bm, pat_id, sp, _path| {
695 let var_ty = visit.assign(sp, pat_id, None);
696 fcx.require_type_is_sized(var_ty, sp, traits::VariableType(pat_id));
699 // Check the pattern.
700 fcx.check_pat(&input.pat, arg_ty);
701 fcx.write_ty(input.id, arg_ty);
// Gather the remaining locals/bindings in the body itself.
704 visit.visit_block(body);
// Record the liberated signature for later phases (e.g. regionck).
707 inherited.tables.borrow_mut().liberated_fn_sigs.insert(fn_id, fn_sig);
709 // FIXME(aburka) do we need this special case? and should it be is_uninhabited?
710 let expected = if fcx.ret_ty.is_never() {
713 ExpectHasType(fcx.ret_ty)
715 fcx.check_block_with_expected(body, expected);
// Checks a struct definition: it must be representable (no infinitely-sized
// recursion), and `#[repr(simd)]` structs get the extra SIMD checks.
720 pub fn check_struct(ccx: &CrateCtxt, id: ast::NodeId, span: Span) {
723 check_representable(tcx, span, id, "struct");
725 if tcx.lookup_simd(ccx.tcx.map.local_def_id(id)) {
726 check_simd(tcx, span, id);
// Dispatches per-item-kind type checking (the signature-level pass; bodies
// are handled separately in `check_item_body` below).
730 pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
731 debug!("check_item_type(it.id={}, it.name={})",
733 ccx.tcx.item_path_str(ccx.tcx.map.local_def_id(it.id)));
734 let _indenter = indenter();
736 // Consts can play a role in type-checking, so they are included here.
737 hir::ItemStatic(_, _, ref e) |
738 hir::ItemConst(_, ref e) => check_const(ccx, &e, it.id),
739 hir::ItemEnum(ref enum_definition, _) => {
740 check_enum_variants(ccx,
742 &enum_definition.variants,
745 hir::ItemFn(..) => {} // entirely within check_item_body
746 hir::ItemImpl(_, _, _, _, _, ref impl_items) => {
747 debug!("ItemImpl {} with id {}", it.name, it.id);
748 let impl_def_id = ccx.tcx.map.local_def_id(it.id);
// Trait impls are checked against the trait they implement; inherent
// impls need no such check here.
749 match ccx.tcx.impl_trait_ref(impl_def_id) {
750 Some(impl_trait_ref) => {
751 check_impl_items_against_trait(ccx,
756 let trait_def_id = impl_trait_ref.def_id;
757 check_on_unimplemented(ccx, trait_def_id, it);
762 hir::ItemTrait(..) => {
763 let def_id = ccx.tcx.map.local_def_id(it.id);
764 check_on_unimplemented(ccx, def_id, it);
766 hir::ItemStruct(..) => {
767 check_struct(ccx, it.id, it.span);
769 hir::ItemTy(_, ref generics) => {
770 let pty_ty = ccx.tcx.node_id_to_type(it.id);
771 check_bounds_are_used(ccx, generics, pty_ty);
773 hir::ItemForeignMod(ref m) => {
// Intrinsics get dedicated signature checks; other foreign items may
// not be generic, and variadic fns require the C ABI.
774 if m.abi == Abi::RustIntrinsic {
775 for item in &m.items {
776 intrinsic::check_intrinsic_type(ccx, item);
778 } else if m.abi == Abi::PlatformIntrinsic {
779 for item in &m.items {
780 intrinsic::check_platform_intrinsic_type(ccx, item);
783 for item in &m.items {
784 let pty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(item.id));
785 if !pty.generics.types.is_empty() {
786 let mut err = struct_span_err!(ccx.tcx.sess, item.span, E0044,
787 "foreign items may not have type parameters");
788 span_help!(&mut err, item.span,
789 "consider using specialization instead of \
794 if let hir::ForeignItemFn(ref fn_decl, _) = item.node {
795 require_c_abi_if_variadic(ccx.tcx, fn_decl, m.abi, item.span);
800 _ => {/* nothing to do */ }
// Dispatches per-item-kind body checking: fn bodies via `check_bare_fn`,
// const/static initializers via `check_const`; impl and trait items are
// recursed into individually.
804 pub fn check_item_body<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
805 debug!("check_item_body(it.id={}, it.name={})",
807 ccx.tcx.item_path_str(ccx.tcx.map.local_def_id(it.id)));
808 let _indenter = indenter();
810 hir::ItemFn(ref decl, _, _, _, _, ref body) => {
811 check_bare_fn(ccx, &decl, &body, it.id);
813 hir::ItemImpl(_, _, _, _, _, ref impl_items) => {
814 debug!("ItemImpl {} with id {}", it.name, it.id);
816 for impl_item in impl_items {
817 match impl_item.node {
818 hir::ImplItemKind::Const(_, ref expr) => {
819 check_const(ccx, &expr, impl_item.id)
821 hir::ImplItemKind::Method(ref sig, ref body) => {
822 check_bare_fn(ccx, &sig.decl, body, impl_item.id);
824 hir::ImplItemKind::Type(_) => {
825 // Nothing to do here.
830 hir::ItemTrait(_, _, _, ref trait_items) => {
831 for trait_item in trait_items {
832 match trait_item.node {
// Only trait items WITH a provided body/value need checking here.
833 hir::ConstTraitItem(_, Some(ref expr)) => {
834 check_const(ccx, &expr, trait_item.id)
836 hir::MethodTraitItem(ref sig, Some(ref body)) => {
837 check_bare_fn(ccx, &sig.decl, body, trait_item.id);
839 hir::MethodTraitItem(_, None) |
840 hir::ConstTraitItem(_, None) |
841 hir::TypeTraitItem(..) => {
847 _ => {/* nothing to do */ }
// Validates a `#[rustc_on_unimplemented = "..."]` attribute on a trait:
// the format string may only use `{Self}` or `{TypeParam}` placeholders
// (E0230 for unknown names, E0231 for positional args, E0232 for a
// missing value).
851 fn check_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
854 let generics = ccx.tcx.lookup_generics(def_id);
855 if let Some(ref attr) = item.attrs.iter().find(|a| {
856 a.check_name("rustc_on_unimplemented")
858 if let Some(ref istring) = attr.value_str() {
// Reuse the format-macro parser to tokenize the message template.
859 let parser = Parser::new(&istring);
860 let types = &generics.types;
861 for token in parser {
863 Piece::String(_) => (), // Normal string, no need to check it
864 Piece::NextArgument(a) => match a.position {
865 // `{Self}` is allowed
866 Position::ArgumentNamed(s) if s == "Self" => (),
867 // So is `{A}` if A is a type parameter
868 Position::ArgumentNamed(s) => match types.iter().find(|t| {
873 let name = ccx.tcx.item_name(def_id);
874 span_err!(ccx.tcx.sess, attr.span, E0230,
875 "there is no type parameter \
880 // `{:1}` and `{}` are not to be used
881 Position::ArgumentIs(_) => {
882 span_err!(ccx.tcx.sess, attr.span, E0231,
883 "only named substitution \
884 parameters are allowed");
// Attribute present but without `= "..."` value.
891 ccx.tcx.sess, attr.span, E0232,
892 "this attribute must have a value")
893 .span_label(attr.span, &format!("attribute requires a value"))
894 .note(&format!("eg `#[rustc_on_unimplemented = \"foo\"]`"))
// Emits E0520 when an impl item tries to specialize a parent impl item that
// is not marked `default`; points at the parent impl when it is local,
// otherwise names the crate it comes from.
900 fn report_forbidden_specialization<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
901 impl_item: &hir::ImplItem,
904 let mut err = struct_span_err!(
905 tcx.sess, impl_item.span, E0520,
906 "item `{}` is provided by an `impl` that specializes \
907 another, but the item in the parent `impl` is not \
908 marked `default` and so it cannot be specialized.",
911 match tcx.span_of_impl(parent_impl) {
913 err.span_note(span, "parent implementation is here:");
916 err.note(&format!("parent implementation is in crate `{}`", cname));
// Checks that `impl_item` is allowed to override its counterpart in the
// specialization ancestor chain: `skip(1)` skips the item's own impl, so
// `parent` is the nearest ancestor definition; if that ancestor is final
// (not `default`), the override is rejected.
923 fn check_specialization_validity<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
924 trait_def: &ty::TraitDef<'tcx>,
926 impl_item: &hir::ImplItem)
928 let ancestors = trait_def.ancestors(impl_id);
930 let parent = match impl_item.node {
931 hir::ImplItemKind::Const(..) => {
932 ancestors.const_defs(tcx, impl_item.name).skip(1).next()
933 .map(|node_item| node_item.map(|parent| parent.defaultness))
935 hir::ImplItemKind::Method(..) => {
936 ancestors.fn_defs(tcx, impl_item.name).skip(1).next()
937 .map(|node_item| node_item.map(|parent| parent.defaultness))
940 hir::ImplItemKind::Type(_) => {
941 ancestors.type_defs(tcx, impl_item.name).skip(1).next()
942 .map(|node_item| node_item.map(|parent| parent.defaultness))
946 if let Some(parent) = parent {
947 if parent.item.is_final() {
948 report_forbidden_specialization(tcx, impl_item, parent.node.def_id());
954 fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
957 impl_trait_ref: &ty::TraitRef<'tcx>,
958 impl_items: &[hir::ImplItem]) {
959 // If the trait reference itself is erroneous (so the compilation is going
960 // to fail), skip checking the items here -- the `impl_item` table in `tcx`
961 // isn't populated for such impls.
962 if impl_trait_ref.references_error() { return; }
964 // Locate trait definition and items
966 let trait_def = tcx.lookup_trait_def(impl_trait_ref.def_id);
967 let trait_items = tcx.trait_items(impl_trait_ref.def_id);
968 let mut overridden_associated_type = None;
970 // Check existing impl methods to see if they are both present in trait
971 // and compatible with trait signature
972 for impl_item in impl_items {
973 let ty_impl_item = tcx.impl_or_trait_item(tcx.map.local_def_id(impl_item.id));
974 let ty_trait_item = trait_items.iter()
975 .find(|ac| ac.name() == ty_impl_item.name());
977 // Check that impl definition matches trait definition
978 if let Some(ty_trait_item) = ty_trait_item {
979 match impl_item.node {
980 hir::ImplItemKind::Const(..) => {
981 let impl_const = match ty_impl_item {
982 ty::ConstTraitItem(ref cti) => cti,
983 _ => span_bug!(impl_item.span, "non-const impl-item for const")
986 // Find associated const definition.
987 if let &ty::ConstTraitItem(ref trait_const) = ty_trait_item {
988 compare_const_impl(ccx,
994 let mut err = struct_span_err!(tcx.sess, impl_item.span, E0323,
995 "item `{}` is an associated const, \
996 which doesn't match its trait `{:?}`",
999 err.span_label(impl_item.span, &format!("does not match trait"));
1000 // We can only get the spans from local trait definition
1001 // Same for E0324 and E0325
1002 if let Some(trait_span) = tcx.map.span_if_local(ty_trait_item.def_id()) {
1003 err.span_label(trait_span, &format!("item in trait"));
1008 hir::ImplItemKind::Method(_, ref body) => {
1009 let impl_method = match ty_impl_item {
1010 ty::MethodTraitItem(ref mti) => mti,
1011 _ => span_bug!(impl_item.span, "non-method impl-item for method")
1014 if let &ty::MethodTraitItem(ref trait_method) = ty_trait_item {
1015 compare_impl_method(ccx,
1022 let mut err = struct_span_err!(tcx.sess, impl_item.span, E0324,
1023 "item `{}` is an associated method, \
1024 which doesn't match its trait `{:?}`",
1027 err.span_label(impl_item.span, &format!("does not match trait"));
1028 if let Some(trait_span) = tcx.map.span_if_local(ty_trait_item.def_id()) {
1029 err.span_label(trait_span, &format!("item in trait"));
1034 hir::ImplItemKind::Type(_) => {
1035 let impl_type = match ty_impl_item {
1036 ty::TypeTraitItem(ref tti) => tti,
1037 _ => span_bug!(impl_item.span, "non-type impl-item for type")
1040 if let &ty::TypeTraitItem(ref at) = ty_trait_item {
1041 if let Some(_) = at.ty {
1042 overridden_associated_type = Some(impl_item);
1045 let mut err = struct_span_err!(tcx.sess, impl_item.span, E0325,
1046 "item `{}` is an associated type, \
1047 which doesn't match its trait `{:?}`",
1050 err.span_label(impl_item.span, &format!("does not match trait"));
1051 if let Some(trait_span) = tcx.map.span_if_local(ty_trait_item.def_id()) {
1052 err.span_label(trait_span, &format!("item in trait"));
1060 check_specialization_validity(tcx, trait_def, impl_id, impl_item);
1063 // Check for missing items from trait
1064 let provided_methods = tcx.provided_trait_methods(impl_trait_ref.def_id);
1065 let mut missing_items = Vec::new();
1066 let mut invalidated_items = Vec::new();
1067 let associated_type_overridden = overridden_associated_type.is_some();
1068 for trait_item in trait_items.iter() {
1073 ty::ConstTraitItem(ref associated_const) => {
1074 is_provided = associated_const.has_value;
1075 is_implemented = impl_items.iter().any(|ii| {
1077 hir::ImplItemKind::Const(..) => {
1078 ii.name == associated_const.name
1084 ty::MethodTraitItem(ref trait_method) => {
1085 is_provided = provided_methods.iter().any(|m| m.name == trait_method.name);
1086 is_implemented = trait_def.ancestors(impl_id)
1087 .fn_defs(tcx, trait_method.name)
1089 .map(|node_item| !node_item.node.is_from_trait())
1092 ty::TypeTraitItem(ref trait_assoc_ty) => {
1093 is_provided = trait_assoc_ty.ty.is_some();
1094 is_implemented = trait_def.ancestors(impl_id)
1095 .type_defs(tcx, trait_assoc_ty.name)
1097 .map(|node_item| !node_item.node.is_from_trait())
1102 if !is_implemented {
1104 missing_items.push(trait_item.name());
1105 } else if associated_type_overridden {
1106 invalidated_items.push(trait_item.name());
1111 if !missing_items.is_empty() {
1112 struct_span_err!(tcx.sess, impl_span, E0046,
1113 "not all trait items implemented, missing: `{}`",
1114 missing_items.iter()
1115 .map(|name| name.to_string())
1116 .collect::<Vec<_>>().join("`, `"))
1117 .span_label(impl_span, &format!("missing `{}` in implementation",
1118 missing_items.iter()
1119 .map(|name| name.to_string())
1120 .collect::<Vec<_>>().join("`, `"))
1124 if !invalidated_items.is_empty() {
1125 let invalidator = overridden_associated_type.unwrap();
1126 span_err!(tcx.sess, invalidator.span, E0399,
1127 "the following trait items need to be reimplemented \
1128 as `{}` was overridden: `{}`",
1130 invalidated_items.iter()
1131 .map(|name| name.to_string())
1132 .collect::<Vec<_>>().join("`, `"))
1136 /// Checks a constant with a given type.
///
/// Builds a fresh `FnCtxt` for the constant's body, requires the expected
/// type to be `Sized`, coerces the body expression to that type, then runs
/// obligation selection, region checking, and writeback for the expression.
1137 fn check_const_with_type<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
1138 expr: &'tcx hir::Expr,
1139 expected_type: Ty<'tcx>,
1141 ccx.inherited(id).enter(|inh| {
1142 let fcx = FnCtxt::new(&inh, expected_type, expr.id);
1143 fcx.require_type_is_sized(expected_type, expr.span, traits::ConstSized);
1145 // Gather locals in statics (because of block expressions).
1146 // This is technically unnecessary because locals in static items are forbidden,
1147 // but prevents type checking from blowing up before const checking can properly
1149 GatherLocalsVisitor { fcx: &fcx }.visit_expr(expr);
// The body must coerce to (not merely unify with) the declared type.
1151 fcx.check_expr_coercable_to_type(expr, expected_type);
// Resolve leftover inference: apply fallbacks, analyze closures, and then
// force all remaining obligations (reporting errors for any that fail).
1153 fcx.select_all_obligations_and_apply_defaults();
1154 fcx.closure_analyze_const(expr);
1155 fcx.select_obligations_where_possible();
1157 fcx.select_all_obligations_or_error();
// Region-check the body and write the fully resolved types back.
1159 fcx.regionck_expr(expr);
1160 fcx.resolve_type_vars_in_expr(expr, id);
/// Checks a constant item: looks up its declared type from the item-type
/// table and defers to `check_const_with_type`.
1164 fn check_const<'a, 'tcx>(ccx: &CrateCtxt<'a,'tcx>,
1165 expr: &'tcx hir::Expr,
1167 let decl_ty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(id)).ty;
1168 check_const_with_type(ccx, expr, decl_ty, id);
1171 /// Checks whether a type can be represented in memory. In particular, it
1172 /// identifies types that contain themselves without indirection through a
1173 /// pointer, which would mean their size is unbounded.
///
/// Emits the infinite-size error for self-recursive types. The boolean
/// result presumably reports whether the type was representable —
/// TODO(review): confirm; not all return paths are visible in this excerpt.
1174 pub fn check_representable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
1176 item_id: ast::NodeId,
1177 _designation: &str) -> bool {
1178 let rty = tcx.node_id_to_type(item_id);
1180 // Check that it is possible to represent this type. This call identifies
1181 // (1) types that contain themselves and (2) types that contain a different
1182 // recursive type. It is only necessary to throw an error on those that
1183 // contain themselves. For case 2, there must be an inner type that will be
1184 // caught by case 1.
1185 match rty.is_representable(tcx, sp) {
1186 Representability::SelfRecursive => {
// A self-recursive type has unbounded size: report it on the item.
1187 let item_def_id = tcx.map.local_def_id(item_id);
1188 tcx.recursive_type_with_infinite_size_error(item_def_id).emit();
1191 Representability::Representable | Representability::ContainsRecursive => (),
/// Validates a SIMD vector type: the struct must be non-empty (E0075),
/// homogeneous in its field types (E0076), and the element type must be a
/// machine type or a type parameter (E0077).
1196 pub fn check_simd<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span, id: ast::NodeId) {
1197 let t = tcx.node_id_to_type(id);
1199 ty::TyStruct(def, substs) => {
1200 let fields = &def.struct_variant().fields;
1201 if fields.is_empty() {
1202 span_err!(tcx.sess, sp, E0075, "SIMD vector cannot be empty");
// Every field must have the same type as the first field.
1205 let e = fields[0].ty(tcx, substs);
1206 if !fields.iter().all(|f| f.ty(tcx, substs) == e) {
1207 span_err!(tcx.sess, sp, E0076, "SIMD vector should be homogeneous");
// Element-type check: a bare type parameter is allowed (checked at
// instantiation time); otherwise the element must be a machine type.
1211 ty::TyParam(_) => { /* struct<T>(T, T, T, T) is ok */ }
1212 _ if e.is_machine() => { /* struct(u8, u8, u8, u8) is ok */ }
1214 span_err!(tcx.sess, sp, E0077,
1215 "SIMD vector element type should be machine type");
/// Checks an enum definition: validates its `repr` hint (E0084 for a
/// non-default repr on a zero-variant enum), type-checks each explicit
/// discriminant expression against the repr type, reports duplicate
/// discriminant values (E0081), and finally checks representability.
1224 #[allow(trivial_numeric_casts)]
1225 pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
1227 vs: &'tcx [hir::Variant],
1229 let def_id = ccx.tcx.map.local_def_id(id);
1230 let hint = *ccx.tcx.lookup_repr_hints(def_id).get(0).unwrap_or(&attr::ReprAny);
1232 if hint != attr::ReprAny && vs.is_empty() {
1234 ccx.tcx.sess, sp, E0084,
1235 "unsupported representation for zero-variant enum")
1236 .span_label(sp, &format!("unsupported enum representation"))
// Each explicit discriminant is a constant of the enum's repr type.
1240 let repr_type_ty = ccx.tcx.enum_repr_type(Some(&hint)).to_ty(ccx.tcx);
1242 if let Some(ref e) = v.node.disr_expr {
1243 check_const_with_type(ccx, e, repr_type_ty, e.id);
1247 let def_id = ccx.tcx.map.local_def_id(id);
1249 let variants = &ccx.tcx.lookup_adt_def(def_id).variants;
1250 let mut disr_vals: Vec<ty::Disr> = Vec::new();
1251 for (v, variant) in vs.iter().zip(variants.iter()) {
1252 let current_disr_val = variant.disr_val;
1254 // Check for duplicate discriminant values
1255 if let Some(i) = disr_vals.iter().position(|&x| x == current_disr_val) {
// Point at both the first use and the duplicate; prefer the span of the
// discriminant expression when one was written explicitly.
1256 let variant_i_node_id = ccx.tcx.map.as_local_node_id(variants[i].did).unwrap();
1257 let variant_i = ccx.tcx.map.expect_variant(variant_i_node_id);
1258 let i_span = match variant_i.node.disr_expr {
1259 Some(ref expr) => expr.span,
1260 None => ccx.tcx.map.span(variant_i_node_id)
1262 let span = match v.node.disr_expr {
1263 Some(ref expr) => expr.span,
1266 struct_span_err!(ccx.tcx.sess, span, E0081,
1267 "discriminant value `{}` already exists", disr_vals[i])
1268 .span_label(i_span, &format!("first use of `{}`", disr_vals[i]))
1269 .span_label(span , &format!("enum already has `{}`", disr_vals[i]))
1272 disr_vals.push(current_disr_val);
1275 check_representable(ccx.tcx, sp, id, "enum");
// `AstConv` lets `FnCtxt` drive HIR-type-to-`Ty` conversion during body
// checking. Most methods delegate to `tcx` lookup tables; the `*_infer`
// methods create fresh inference variables instead of erroring, since
// inference is available inside a body.
1278 impl<'a, 'gcx, 'tcx> AstConv<'gcx, 'tcx> for FnCtxt<'a, 'gcx, 'tcx> {
1279 fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
1281 fn ast_ty_to_ty_cache(&self) -> &RefCell<NodeMap<Ty<'tcx>>> {
1282 &self.ast_ty_to_ty_cache
1285 fn get_generics(&self, _: Span, id: DefId)
1286 -> Result<&'tcx ty::Generics<'tcx>, ErrorReported>
1288 Ok(self.tcx().lookup_generics(id))
1291 fn get_item_type_scheme(&self, _: Span, id: DefId)
1292 -> Result<ty::TypeScheme<'tcx>, ErrorReported>
1294 Ok(self.tcx().lookup_item_type(id))
1297 fn get_trait_def(&self, _: Span, id: DefId)
1298 -> Result<&'tcx ty::TraitDef<'tcx>, ErrorReported>
1300 Ok(self.tcx().lookup_trait_def(id))
1303 fn ensure_super_predicates(&self, _: Span, _: DefId) -> Result<(), ErrorReported> {
1304 // all super predicates are ensured during collect pass
1308 fn get_free_substs(&self) -> Option<&Substs<'tcx>> {
1309 Some(&self.parameter_environment.free_substs)
// Collects the where-clause trait bounds from the parameter environment
// that apply to the given type parameter (matched by its index).
1312 fn get_type_parameter_bounds(&self,
1314 node_id: ast::NodeId)
1315 -> Result<Vec<ty::PolyTraitRef<'tcx>>, ErrorReported>
1317 let def = self.tcx.type_parameter_def(node_id);
1318 let r = self.parameter_environment
1321 .filter_map(|predicate| {
1323 ty::Predicate::Trait(ref data) => {
1324 if data.0.self_ty().is_param(def.index) {
1325 Some(data.to_poly_trait_ref())
1339 fn trait_defines_associated_type_named(&self,
1340 trait_def_id: DefId,
1341 assoc_name: ast::Name)
1344 let trait_def = self.tcx().lookup_trait_def(trait_def_id);
1345 trait_def.associated_type_names.contains(&assoc_name)
// `_` in type position becomes a fresh type inference variable.
1348 fn ty_infer(&self, _span: Span) -> Ty<'tcx> {
1352 fn ty_infer_for_def(&self,
1353 ty_param_def: &ty::TypeParameterDef<'tcx>,
1354 substs: &Substs<'tcx>,
1355 span: Span) -> Ty<'tcx> {
1356 self.type_var_for_def(span, ty_param_def, substs)
// Inside a body, late-bound regions can be instantiated with fresh
// region variables before normalizing the associated-type projection.
1359 fn projected_ty_from_poly_trait_ref(&self,
1361 poly_trait_ref: ty::PolyTraitRef<'tcx>,
1362 item_name: ast::Name)
1365 let (trait_ref, _) =
1366 self.replace_late_bound_regions_with_fresh_var(
1368 infer::LateBoundRegionConversionTime::AssocTypeProjection(item_name),
1371 self.normalize_associated_type(span, trait_ref, item_name)
1374 fn projected_ty(&self,
1376 trait_ref: ty::TraitRef<'tcx>,
1377 item_name: ast::Name)
1380 self.normalize_associated_type(span, trait_ref, item_name)
1383 fn set_tainted_by_errors(&self) {
1384 self.infcx.set_tainted_by_errors()
// Region behavior inside a function body: elided and defaulted lifetimes
// are always fresh region inference variables, never fixed defaults.
1388 impl<'a, 'gcx, 'tcx> RegionScope for FnCtxt<'a, 'gcx, 'tcx> {
1389 fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
1390 Some(self.base_object_lifetime_default(span))
1393 fn base_object_lifetime_default(&self, span: Span) -> ty::Region {
1394 // RFC #599 specifies that object lifetime defaults take
1395 // precedence over other defaults. But within a fn body we
1396 // don't have a *default* region, rather we use inference to
1397 // find the *correct* region, which is strictly more general
1398 // (and anyway, within a fn body the right region may not even
1399 // be something the user can write explicitly, since it might
1400 // be some expression).
1401 *self.next_region_var(infer::MiscVariable(span))
// One fresh region variable per elided region; this never fails in a body,
// so the `ElisionFailureInfo` error arm is unused here.
1404 fn anon_regions(&self, span: Span, count: usize)
1405 -> Result<Vec<ty::Region>, Option<Vec<ElisionFailureInfo>>> {
1406 Ok((0..count).map(|_| {
1407 *self.next_region_var(infer::MiscVariable(span))
1412 /// Controls whether the arguments are tupled. This is used for the call
1415 /// Tupling means that all call-side arguments are packed into a tuple and
1416 /// passed as a single parameter. For example, if tupling is enabled, this
1419 /// fn f(x: (isize, isize))
1421 /// Can be called as:
// NOTE(review): the variant list is not shown in this excerpt. The flag
// derives `Clone`/`Eq`/`PartialEq` so call-checking code can compare it.
1428 #[derive(Clone, Eq, PartialEq)]
1429 enum TupleArgumentsFlag {
1434 impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
/// Creates a function-body checking context over the shared `Inherited`
/// tables, recording the session error count at creation time so later
/// checks can tell whether *this* body produced new errors.
1435 pub fn new(inh: &'a Inherited<'a, 'gcx, 'tcx>,
1437 body_id: ast::NodeId)
1438 -> FnCtxt<'a, 'gcx, 'tcx> {
1440 ast_ty_to_ty_cache: RefCell::new(NodeMap()),
1442 writeback_errors: Cell::new(false),
1443 err_count_on_creation: inh.tcx.sess.err_count(),
// Bodies start in the "normal" (safe) unsafety state.
1445 ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal, 0)),
1450 pub fn param_env(&self) -> &ty::ParameterEnvironment<'tcx> {
1451 &self.parameter_environment
1454 pub fn sess(&self) -> &Session {
/// Number of errors the session has reported since this `FnCtxt` was built.
1458 pub fn err_count_since_creation(&self) -> usize {
1459 self.tcx.sess.err_count() - self.err_count_on_creation
1462 /// Resolves type variables in `ty` if possible. Unlike the infcx
1463 /// version (resolve_type_vars_if_possible), this version will
1464 /// also select obligations if it seems useful, in an effort
1465 /// to get more type information.
1466 fn resolve_type_vars_with_obligations(&self, mut ty: Ty<'tcx>) -> Ty<'tcx> {
1467 debug!("resolve_type_vars_with_obligations(ty={:?})", ty);
1469 // No TyInfer()? Nothing needs doing.
1470 if !ty.has_infer_types() {
1471 debug!("resolve_type_vars_with_obligations: ty={:?}", ty);
1475 // If `ty` is a type variable, see whether we already know what it is.
1476 ty = self.resolve_type_vars_if_possible(&ty);
1477 if !ty.has_infer_types() {
1478 debug!("resolve_type_vars_with_obligations: ty={:?}", ty);
1482 // If not, try resolving pending obligations as much as
1483 // possible. This can help substantially when there are
1484 // indirect dependencies that don't seem worth tracking
1486 self.select_obligations_where_possible();
1487 ty = self.resolve_type_vars_if_possible(&ty);
1489 debug!("resolve_type_vars_with_obligations: ty={:?}", ty);
/// Queues a call resolution for `closure_def_id` to be retried later
/// (once more is known about the closure).
1493 fn record_deferred_call_resolution(&self,
1494 closure_def_id: DefId,
1495 r: DeferredCallResolutionHandler<'gcx, 'tcx>) {
1496 let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
1497 deferred_call_resolutions.entry(closure_def_id).or_insert(vec![]).push(r);
/// Removes and returns all deferred call resolutions recorded for
/// `closure_def_id` (empty vec if none were recorded).
1500 fn remove_deferred_call_resolutions(&self,
1501 closure_def_id: DefId)
1502 -> Vec<DeferredCallResolutionHandler<'gcx, 'tcx>>
1504 let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
1505 deferred_call_resolutions.remove(&closure_def_id).unwrap_or(Vec::new())
/// A debugging tag identifying this context by its address.
1508 pub fn tag(&self) -> String {
1509 let self_ptr: *const FnCtxt = self;
1510 format!("{:?}", self_ptr)
/// Returns the type recorded for local/binding `nid`, reporting E0513
/// when no type was recorded.
1513 pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> Ty<'tcx> {
1514 match self.locals.borrow().get(&nid) {
1517 span_err!(self.tcx.sess, span, E0513,
1518 "no type for local variable {}",
/// Records `ty` as the type of `node_id` in the typeck tables.
1526 pub fn write_ty(&self, node_id: ast::NodeId, ty: Ty<'tcx>) {
1527 debug!("write_ty({}, {:?}) in fcx {}",
1528 node_id, ty, self.tag());
1529 self.tables.borrow_mut().node_types.insert(node_id, ty);
1531 // Add adjustments to !-expressions
1533 if let Some(hir::map::NodeExpr(_)) = self.tcx.map.find(node_id) {
// `AdjustNeverToAny` records that `!` is coerced via a fresh
// diverging type variable.
1534 let adj = adjustment::AdjustNeverToAny(self.next_diverging_ty_var());
1535 self.write_adjustment(node_id, adj);
/// Records the item substitutions for `node_id`, skipping no-op substs.
1540 pub fn write_substs(&self, node_id: ast::NodeId, substs: ty::ItemSubsts<'tcx>) {
1541 if !substs.substs.is_noop() {
1542 debug!("write_substs({}, {:?}) in fcx {}",
1547 self.tables.borrow_mut().item_substs.insert(node_id, substs);
/// Records an autoderef adjustment for `node_id`.
1551 pub fn write_autoderef_adjustment(&self,
1552 node_id: ast::NodeId,
1554 self.write_adjustment(
1556 adjustment::AdjustDerefRef(adjustment::AutoDerefRef {
/// Records an arbitrary adjustment for `node_id`, dropping identity
/// adjustments instead of storing them.
1564 pub fn write_adjustment(&self,
1565 node_id: ast::NodeId,
1566 adj: adjustment::AutoAdjustment<'tcx>) {
1567 debug!("write_adjustment(node_id={}, adj={:?})", node_id, adj);
1569 if adj.is_identity() {
1573 self.tables.borrow_mut().adjustments.insert(node_id, adj);
1576 /// Basically whenever we are converting from a type scheme into
1577 /// the fn body space, we always want to normalize associated
1578 /// types as well. This function combines the two.
1579 fn instantiate_type_scheme<T>(&self,
1581 substs: &Substs<'tcx>,
1584 where T : TypeFoldable<'tcx>
1586 let value = value.subst(self.tcx, substs);
1587 let result = self.normalize_associated_types_in(span, &value);
1588 debug!("instantiate_type_scheme(value={:?}, substs={:?}) = {:?}",
1595 /// As `instantiate_type_scheme`, but for the bounds found in a
1596 /// generic type scheme.
1597 fn instantiate_bounds(&self,
1599 substs: &Substs<'tcx>,
1600 bounds: &ty::GenericPredicates<'tcx>)
1601 -> ty::InstantiatedPredicates<'tcx>
1603 let result = bounds.instantiate(self.tcx, substs);
1604 let result = self.normalize_associated_types_in(span, &result.predicates);
1605 debug!("instantiate_bounds(bounds={:?}, substs={:?}) = {:?}",
1609 ty::InstantiatedPredicates {
1614 /// Replace all anonymized types with fresh inference variables
1615 /// and record them for writeback.
1616 fn instantiate_anon_types<T: TypeFoldable<'tcx>>(&self, value: &T) -> T {
1617 value.fold_with(&mut BottomUpFolder { tcx: self.tcx, fldop: |ty| {
1618 if let ty::TyAnon(def_id, substs) = ty.sty {
1619 // Use the same type variable if the exact same TyAnon appears more
1620 // than once in the return type (e.g. if it's passed to a type alias).
1621 if let Some(ty_var) = self.anon_types.borrow().get(&def_id) {
1624 let ty_var = self.next_ty_var();
1625 self.anon_types.borrow_mut().insert(def_id, ty_var);
// The bounds declared on the `impl Trait` must hold for whatever
// concrete type the fresh variable ends up being.
1627 let item_predicates = self.tcx.lookup_predicates(def_id);
1628 let bounds = item_predicates.instantiate(self.tcx, substs);
1630 let span = self.tcx.map.def_id_span(def_id, codemap::DUMMY_SP);
1631 for predicate in bounds.predicates {
1632 // Change the predicate to refer to the type variable,
1633 // which will be the concrete type, instead of the TyAnon.
1634 // This also instantiates nested `impl Trait`.
1635 let predicate = self.instantiate_anon_types(&predicate);
1637 // Require that the predicate holds for the concrete type.
1638 let cause = traits::ObligationCause::new(span, self.body_id,
1639 traits::ReturnType);
1640 self.register_predicate(traits::Obligation::new(cause, predicate));
/// Normalizes associated types in `value`, relative to this body.
1650 fn normalize_associated_types_in<T>(&self, span: Span, value: &T) -> T
1651 where T : TypeFoldable<'tcx>
1653 self.inh.normalize_associated_types_in(span, self.body_id, value)
/// Normalizes the projection `<trait_ref>::item_name`, registering any
/// obligations the normalization produces.
1656 fn normalize_associated_type(&self,
1658 trait_ref: ty::TraitRef<'tcx>,
1659 item_name: ast::Name)
1662 let cause = traits::ObligationCause::new(span,
1664 traits::ObligationCauseCode::MiscObligation);
1667 .normalize_projection_type(self,
1669 trait_ref: trait_ref,
1670 item_name: item_name,
1675 /// Instantiates the type in `did` with the generics in `path` and returns
1676 /// it (registering the necessary trait obligations along the way).
1678 /// Note that this function is only intended to be used with type-paths,
1679 /// not with value-paths.
1680 pub fn instantiate_type_path(&self,
1683 node_id: ast::NodeId)
1685 debug!("instantiate_type_path(did={:?}, path={:?})", did, path);
1686 let mut ty = self.tcx.lookup_item_type(did).ty;
1688 // Tuple variants have fn type even in type namespace, extract true variant type from it
1689 ty = self.tcx.no_late_bound_regions(&ty.fn_ret()).unwrap();
1691 let type_predicates = self.tcx.lookup_predicates(did);
1692 let substs = AstConv::ast_path_substs_for_ty(self, self,
1694 PathParamMode::Optional,
1696 path.segments.last().unwrap());
1697 debug!("instantiate_type_path: ty={:?} substs={:?}", ty, substs);
// Register the item's bounds, instantiated with the path's substs, as
// obligations attributed to this path.
1698 let bounds = self.instantiate_bounds(path.span, substs, &type_predicates);
1699 let cause = traits::ObligationCause::new(path.span, self.body_id,
1700 traits::ItemObligation(did));
1701 self.add_obligations_for_parameters(cause, &bounds);
// Record the substituted type and substs for writeback.
1703 let ty_substituted = self.instantiate_type_scheme(path.span, substs, &ty);
1704 self.write_ty(node_id, ty_substituted);
1705 self.write_substs(node_id, ty::ItemSubsts {
/// Records the unit type `()` for `node_id`.
1711 pub fn write_nil(&self, node_id: ast::NodeId) {
1712 self.write_ty(node_id, self.tcx.mk_nil());
/// Records the never type `!` for `node_id`.
1715 pub fn write_never(&self, node_id: ast::NodeId) {
1716 self.write_ty(node_id, self.tcx.types.never);
/// Records the error type for `node_id`.
1719 pub fn write_error(&self, node_id: ast::NodeId) {
1720 self.write_ty(node_id, self.tcx.types.err);
/// Registers the builtin-bound obligation `ty: bound` with the given cause.
1723 pub fn require_type_meets(&self,
1726 code: traits::ObligationCauseCode<'tcx>,
1727 bound: ty::BuiltinBound)
1729 self.register_builtin_bound(
1732 traits::ObligationCause::new(span, self.body_id, code));
/// Requires `ty: Sized`.
1735 pub fn require_type_is_sized(&self,
1738 code: traits::ObligationCauseCode<'tcx>)
1740 self.require_type_meets(ty, span, code, ty::BoundSized);
/// Requires that the type of `expr` is `Sized`.
1743 pub fn require_expr_have_sized_type(&self,
1745 code: traits::ObligationCauseCode<'tcx>)
1747 self.require_type_is_sized(self.expr_ty(expr), expr.span, code);
/// Adds a builtin-bound obligation to the fulfillment context.
1750 pub fn register_builtin_bound(&self,
1752 builtin_bound: ty::BuiltinBound,
1753 cause: traits::ObligationCause<'tcx>)
1755 self.fulfillment_cx.borrow_mut()
1756 .register_builtin_bound(self, ty, builtin_bound, cause);
/// Adds a predicate obligation to the fulfillment context.
1759 pub fn register_predicate(&self,
1760 obligation: traits::PredicateObligation<'tcx>)
1762 debug!("register_predicate({:?})",
1766 .register_predicate_obligation(self, obligation);
/// Converts a HIR type to a `Ty`, registering a well-formedness
/// obligation for the result.
1769 pub fn to_ty(&self, ast_t: &hir::Ty) -> Ty<'tcx> {
1770 let t = AstConv::ast_ty_to_ty(self, self, ast_t);
1771 self.register_wf_obligation(t, ast_t.span, traits::MiscObligation);
/// Returns the recorded type of `ex`, honoring an `AdjustNeverToAny`
/// adjustment when one was recorded for the expression.
1775 pub fn expr_ty(&self, ex: &hir::Expr) -> Ty<'tcx> {
1776 if let Some(&adjustment::AdjustNeverToAny(ref t))
1777 = self.tables.borrow().adjustments.get(&ex.id) {
1780 match self.tables.borrow().node_types.get(&ex.id) {
1783 bug!("no type for expr in fcx {}", self.tag());
1788 /// Apply `adjustment` to the type of `expr`
1789 pub fn adjust_expr_ty(&self,
1791 adjustment: Option<&adjustment::AutoAdjustment<'tcx>>)
1794 let raw_ty = self.expr_ty(expr);
1795 let raw_ty = self.shallow_resolve(raw_ty);
1796 let resolve_ty = |ty: Ty<'tcx>| self.resolve_type_vars_if_possible(&ty);
// Method types looked up from the method map may still contain inference
// variables; resolve them while the adjustment is applied.
1797 raw_ty.adjust(self.tcx, expr.span, expr.id, adjustment, |method_call| {
1798 self.tables.borrow().method_map.get(&method_call)
1799 .map(|method| resolve_ty(method.ty))
/// Returns the recorded type of node `id`. After errors have been
/// reported, a missing entry resolves to the error type instead of ICEing.
1803 pub fn node_ty(&self, id: ast::NodeId) -> Ty<'tcx> {
1804 match self.tables.borrow().node_types.get(&id) {
1806 None if self.err_count_since_creation() != 0 => self.tcx.types.err,
1808 bug!("no type for node {}: {} in fcx {}",
1809 id, self.tcx.map.node_to_string(id),
/// Read-only borrow of the item-substs table.
1815 pub fn item_substs(&self) -> Ref<NodeMap<ty::ItemSubsts<'tcx>>> {
1816 // NOTE: @jroesch this is hack that appears to be fixed on nightly, will monitor if
1817 // it changes when we upgrade the snapshot compiler
1818 fn project_item_susbts<'a, 'tcx>(tables: &'a ty::Tables<'tcx>)
1819 -> &'a NodeMap<ty::ItemSubsts<'tcx>> {
1823 Ref::map(self.tables.borrow(), project_item_susbts)
/// Invokes the callback with the item substs recorded for `id`, if any.
1826 pub fn opt_node_ty_substs<F>(&self,
1829 F: FnOnce(&ty::ItemSubsts<'tcx>),
1831 match self.tables.borrow().item_substs.get(&id) {
1837 /// Registers an obligation for checking later, during regionck, that the type `ty` must
1838 /// outlive the region `r`.
1839 pub fn register_region_obligation(&self,
1841 region: &'tcx ty::Region,
1842 cause: traits::ObligationCause<'tcx>)
1844 let mut fulfillment_cx = self.fulfillment_cx.borrow_mut();
1845 fulfillment_cx.register_region_obligation(ty, region, cause);
1848 /// Registers an obligation that the type `ty` is well-formed
1849 /// (`ty::Predicate::WellFormed`), to be checked by trait selection.
1850 pub fn register_wf_obligation(&self,
1853 code: traits::ObligationCauseCode<'tcx>)
1855 // WF obligations never themselves fail, so no real need to give a detailed cause:
1856 let cause = traits::ObligationCause::new(span, self.body_id, code);
1857 self.register_predicate(traits::Obligation::new(cause, ty::Predicate::WellFormed(ty)));
1860 pub fn register_old_wf_obligation(&self,
1863 code: traits::ObligationCauseCode<'tcx>)
1865 // Registers an "old-style" WF obligation that uses the
1866 // implicator code. This is basically a buggy version of
1867 // `register_wf_obligation` that is being kept around
1868 // temporarily just to help with phasing in the newer rules.
1870 // FIXME(#27579) all uses of this should be migrated to register_wf_obligation eventually
1871 let cause = traits::ObligationCause::new(span, self.body_id, code);
1872 self.register_region_obligation(ty, self.tcx.mk_region(ty::ReEmpty), cause);
1875 /// Registers obligations that all types appearing in `substs` are well-formed.
1876 pub fn add_wf_bounds(&self, substs: &Substs<'tcx>, expr: &hir::Expr)
1878 for ty in substs.types() {
1879 self.register_wf_obligation(ty, expr.span, traits::MiscObligation);
1883 /// Given a fully substituted set of bounds (`generic_bounds`), and the values with which each
1884 /// type/region parameter was instantiated (`substs`), creates and registers suitable
1885 /// trait/region obligations.
1887 /// For example, if there is a function:
1890 /// fn foo<'a,T:'a>(...)
1893 /// and a reference:
1899 /// Then we will create a fresh region variable `'$0` and a fresh type variable `$1` for `'a`
1900 /// and `T`. This routine will add a region obligation `$1:'$0` and register it locally.
1901 pub fn add_obligations_for_parameters(&self,
1902 cause: traits::ObligationCause<'tcx>,
1903 predicates: &ty::InstantiatedPredicates<'tcx>)
1905 assert!(!predicates.has_escaping_regions());
1907 debug!("add_obligations_for_parameters(predicates={:?})",
1910 for obligation in traits::predicates_for_generics(cause, predicates) {
1911 self.register_predicate(obligation);
1915 // FIXME(arielb1): use this instead of field.ty everywhere
1916 // Only for fields! Returns <none> for methods>
1917 // Indifferent to privacy flags
/// Returns the type of `field` under `substs`, with associated types
/// normalized.
1918 pub fn field_ty(&self,
1920 field: ty::FieldDef<'tcx>,
1921 substs: &Substs<'tcx>)
1924 self.normalize_associated_types_in(span,
1925 &field.ty(self.tcx, substs))
/// Runs all deferred cast checks accumulated while checking expressions.
1928 fn check_casts(&self) {
1929 let mut deferred_cast_checks = self.deferred_cast_checks.borrow_mut();
1930 for cast in deferred_cast_checks.drain(..) {
1935 /// Apply "fallbacks" to some types
1936 /// ! gets replaced with (), unconstrained ints with i32, and unconstrained floats with f64.
1937 fn default_type_parameters(&self) {
1938 use rustc::ty::error::UnconstrainedNumeric::Neither;
1939 use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1941 // Defaulting inference variables becomes very dubious if we have
1942 // encountered type-checking errors. Therefore, if we think we saw
1943 // some errors in this function, just resolve all uninstantiated type
1944 // variables to TyError.
1945 if self.is_tainted_by_errors() {
1946 for ty in &self.unsolved_variables() {
1947 if let ty::TyInfer(_) = self.shallow_resolve(ty).sty {
1948 debug!("default_type_parameters: defaulting `{:?}` to error", ty);
1949 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx().types.err);
1955 for ty in &self.unsolved_variables() {
1956 let resolved = self.resolve_type_vars_if_possible(ty);
1957 if self.type_var_diverges(resolved) {
1958 debug!("default_type_parameters: defaulting `{:?}` to `!` because it diverges",
1960 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
1961 self.tcx.mk_diverging_default());
1963 match self.type_is_unconstrained_numeric(resolved) {
1964 UnconstrainedInt => {
1965 debug!("default_type_parameters: defaulting `{:?}` to `i32`",
1967 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
1969 UnconstrainedFloat => {
1970 debug!("default_type_parameters: defaulting `{:?}` to `f32`",
// NOTE(review): the debug message above says `f32`, but the fallback
// actually applied below is `f64` (which matches the doc comment).
1972 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
/// Entry point for inference fallback: uses the feature-gated
/// default-type-parameter algorithm when enabled, otherwise the old one.
1980 fn select_all_obligations_and_apply_defaults(&self) {
1981 if self.tcx.sess.features.borrow().default_type_parameter_fallback {
1982 self.new_select_all_obligations_and_apply_defaults();
1984 self.old_select_all_obligations_and_apply_defaults();
1988 // Implements old type inference fallback algorithm
// Select what we can, apply numeric/diverging fallbacks, then select again
// so obligations unlocked by the fallbacks can make progress.
1989 fn old_select_all_obligations_and_apply_defaults(&self) {
1990 self.select_obligations_where_possible();
1991 self.default_type_parameters();
1992 self.select_obligations_where_possible();
/// Implements the feature-gated fallback algorithm that takes
/// user-supplied type-parameter defaults into account, iterating until no
/// further defaults apply (bounded by the compiler's recursion limit).
1995 fn new_select_all_obligations_and_apply_defaults(&self) {
1996 use rustc::ty::error::UnconstrainedNumeric::Neither;
1997 use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1999 // For the time being this errs on the side of being memory wasteful but provides better
2001 // let type_variables = self.type_variables.clone();
2003 // There is a possibility that this algorithm will have to run an arbitrary number of times
2004 // to terminate so we bound it by the compiler's recursion limit.
2005 for _ in 0..self.tcx.sess.recursion_limit.get() {
2006 // First we try to solve all obligations, it is possible that the last iteration
2007 // has made it possible to make more progress.
2008 self.select_obligations_where_possible();
2010 let mut conflicts = Vec::new();
2012 // Collect all unsolved type, integral and floating point variables.
2013 let unsolved_variables = self.unsolved_variables();
2015 // We must collect the defaults *before* we do any unification. Because we have
2016 // directly attached defaults to the type variables any unification that occurs
2017 // will erase defaults causing conflicting defaults to be completely ignored.
2018 let default_map: FnvHashMap<_, _> =
2021 .filter_map(|t| self.default(t).map(|d| (t, d)))
2024 let mut unbound_tyvars = FnvHashSet();
2026 debug!("select_all_obligations_and_apply_defaults: defaults={:?}", default_map);
2028 // We loop over the unsolved variables, resolving them and if they are
2029 // an unconstrained numeric type we add them to the set of unbound
2030 // variables. We do this so we only apply literal fallback to type
2031 // variables without defaults.
2032 for ty in &unsolved_variables {
2033 let resolved = self.resolve_type_vars_if_possible(ty);
2034 if self.type_var_diverges(resolved) {
2035 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
2036 self.tcx.mk_diverging_default());
2038 match self.type_is_unconstrained_numeric(resolved) {
2039 UnconstrainedInt | UnconstrainedFloat => {
2040 unbound_tyvars.insert(resolved);
2047 // We now remove any numeric types that also have defaults, and instead insert
2048 // the type variable with a defined fallback.
2049 for ty in &unsolved_variables {
2050 if let Some(_default) = default_map.get(ty) {
2051 let resolved = self.resolve_type_vars_if_possible(ty);
2053 debug!("select_all_obligations_and_apply_defaults: \
2054 ty: {:?} with default: {:?}",
2057 match resolved.sty {
2058 ty::TyInfer(ty::TyVar(_)) => {
2059 unbound_tyvars.insert(ty);
// Numeric variables with a user default: prefer the default over the
// literal fallback, so drop the resolved entry if it was added above.
2062 ty::TyInfer(ty::IntVar(_)) | ty::TyInfer(ty::FloatVar(_)) => {
2063 unbound_tyvars.insert(ty);
2064 if unbound_tyvars.contains(resolved) {
2065 unbound_tyvars.remove(resolved);
2074 // If there are no more fallbacks to apply at this point we have applied all possible
2075 // defaults and type inference will proceed as normal.
2076 if unbound_tyvars.is_empty() {
2080 // Finally we go through each of the unbound type variables and unify them with
2081 // the proper fallback, reporting a conflicting default error if any of the
2082 // unifications fail. We know it must be a conflicting default because the
2083 // variable would only be in `unbound_tyvars` and have a concrete value if
2084 // it had been solved by previously applying a default.
2086 // We wrap this in a transaction for error reporting, if we detect a conflict
2087 // we will rollback the inference context to its prior state so we can probe
2088 // for conflicts and correctly report them.
2091 let _ = self.commit_if_ok(|_: &infer::CombinedSnapshot| {
2092 for ty in &unbound_tyvars {
2093 if self.type_var_diverges(ty) {
2094 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
2095 self.tcx.mk_diverging_default());
2097 match self.type_is_unconstrained_numeric(ty) {
2098 UnconstrainedInt => {
2099 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
2101 UnconstrainedFloat => {
2102 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
2105 if let Some(default) = default_map.get(ty) {
2106 let default = default.clone();
2107 match self.eq_types(false,
2108 TypeOrigin::Misc(default.origin_span),
2110 Ok(InferOk { obligations, .. }) => {
2111 // FIXME(#32730) propagate obligations
2112 assert!(obligations.is_empty())
2115 conflicts.push((*ty, default));
2124 // If there are conflicts we rollback, otherwise commit
2125 if conflicts.len() > 0 {
2132 if conflicts.len() > 0 {
2133 // Loop through each conflicting default, figuring out the default that caused
2134 // a unification failure and then report an error for each.
2135 for (conflict, default) in conflicts {
2136 let conflicting_default =
2137 self.find_conflicting_default(&unbound_tyvars, &default_map, conflict)
2138 .unwrap_or(type_variable::Default {
2139 ty: self.next_ty_var(),
2140 origin_span: syntax_pos::DUMMY_SP,
2141 def_id: self.tcx.map.local_def_id(0) // what do I put here?
2144 // This is to ensure that we eliminate any non-determinism from the error
2145 // reporting by fixing an order, it doesn't matter what order we choose
2146 // just that it is consistent.
2147 let (first_default, second_default) =
2148 if default.def_id < conflicting_default.def_id {
2149 (default, conflicting_default)
2151 (conflicting_default, default)
2155 self.report_conflicting_default_types(
2156 first_default.origin_span,
// One final selection pass after the defaults have been committed (or
// rolled back on conflict).
2163 self.select_obligations_where_possible();
2166 // For use in error handling related to default type parameter fallback. We explicitly
2167 // apply the default that caused conflict first to a local version of the type variable
2168 // table then apply defaults until we find a conflict. That default must be the one
2169 // that caused conflict earlier.
// NOTE(review): this extract has gaps (the embedded line numbers skip, e.g. the
// `conflict` parameter and several closing braces are not visible here). The code
// below is kept byte-identical to the extract.
2170 fn find_conflicting_default(&self,
2171 unbound_vars: &FnvHashSet<Ty<'tcx>>,
2172 default_map: &FnvHashMap<&Ty<'tcx>, type_variable::Default<'tcx>>,
2174 -> Option<type_variable::Default<'tcx>> {
2175 use rustc::ty::error::UnconstrainedNumeric::Neither;
2176 use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
2178 // Ensure that we apply the conflicting default first
2179 let mut unbound_tyvars = Vec::with_capacity(unbound_vars.len() + 1);
2180 unbound_tyvars.push(conflict);
2181 unbound_tyvars.extend(unbound_vars.iter());
2183 let mut result = None;
2184 // We run the same code as above applying defaults in order, this time when
2185 // we find the conflict we just return it for error reporting above.
2187 // We also run this inside snapshot that never commits so we can do error
2188 // reporting for more then one conflict.
2189 for ty in &unbound_tyvars {
// Diverging type variables fall back to the diverging default (`!`/`()`).
2190 if self.type_var_diverges(ty) {
2191 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
2192 self.tcx.mk_diverging_default());
// Unconstrained numeric variables get the standard numeric fallbacks.
2194 match self.type_is_unconstrained_numeric(ty) {
2195 UnconstrainedInt => {
2196 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
2198 UnconstrainedFloat => {
2199 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
// Otherwise apply the user-provided default; the first one whose
// unification fails is the conflicting default we report.
2202 if let Some(default) = default_map.get(ty) {
2203 let default = default.clone();
2204 match self.eq_types(false,
2205 TypeOrigin::Misc(default.origin_span),
2207 // FIXME(#32730) propagate obligations
2208 Ok(InferOk { obligations, .. }) => assert!(obligations.is_empty()),
2210 result = Some(default);
/// Drives trait selection to completion for the current body: applies
/// defaults, then requires every remaining obligation to be resolved.
/// Fulfillment failures are reported as hard errors; RFC 1592
/// obligations that fail are reported as warnings only.
2222 fn select_all_obligations_or_error(&self) {
2223 debug!("select_all_obligations_or_error");
2225 // upvar inference should have ensured that all deferred call
2226 // resolutions are handled by now.
2227 assert!(self.deferred_call_resolutions.borrow().is_empty());
2229 self.select_all_obligations_and_apply_defaults();
2231 let mut fulfillment_cx = self.fulfillment_cx.borrow_mut();
2233 // Steal the deferred obligations before the fulfillment
2234 // context can turn all of them into errors.
2235 let obligations = fulfillment_cx.take_deferred_obligations();
2236 self.deferred_obligations.borrow_mut().extend(obligations);
2238 match fulfillment_cx.select_all_or_error(self) {
2240 Err(errors) => { self.report_fulfillment_errors(&errors); }
2243 if let Err(ref errors) = fulfillment_cx.select_rfc1592_obligations(self) {
2244 self.report_fulfillment_errors_as_warnings(errors, self.body_id);
2248 /// Select as many obligations as we can at present.
// Errors found so far are reported immediately; obligations that
// cannot yet be resolved remain pending in the fulfillment context.
2249 fn select_obligations_where_possible(&self) {
2250 match self.fulfillment_cx.borrow_mut().select_where_possible(self) {
2252 Err(errors) => { self.report_fulfillment_errors(&errors); }
2256 /// For the overloaded lvalue expressions (`*x`, `x[3]`), the trait
2257 /// returns a type of `&T`, but the actual type we assign to the
2258 /// *expression* is `T`. So this function just peels off the return
2259 /// type by one layer to yield `T`.
2260 fn make_overloaded_lvalue_return_type(&self,
2261 method: MethodCallee<'tcx>)
2262 -> ty::TypeAndMut<'tcx>
2264 // extract method return type, which will be &T;
2265 // all LB regions should have been instantiated during method lookup
2266 let ret_ty = method.ty.fn_ret();
// `unwrap` is justified by the invariant above: late-bound regions
// were already instantiated, so `no_late_bound_regions` succeeds.
2267 let ret_ty = self.tcx.no_late_bound_regions(&ret_ty).unwrap();
2269 // method returns &T, but the type as visible to user is T, so deref
2270 ret_ty.builtin_deref(true, NoPreference).unwrap()
/// Autoderefs `base_expr` step by step, attempting `try_index_step` at
/// each type; on success returns the `(index type, element type)` pair.
/// A fixed-size array `[T; N]` that fails the direct step is retried
/// after unsizing to `[T]`.
2273 fn lookup_indexing(&self,
2275 base_expr: &'gcx hir::Expr,
2278 lvalue_pref: LvaluePreference)
2279 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2281 // FIXME(#18741) -- this is almost but not quite the same as the
2282 // autoderef that normal method probing does. They could likely be
2285 let mut autoderef = self.autoderef(base_expr.span, base_ty);
2287 while let Some((adj_ty, autoderefs)) = autoderef.next() {
2288 if let Some(final_mt) = self.try_index_step(
2289 MethodCall::expr(expr.id),
2290 expr, base_expr, adj_ty, autoderefs,
2291 false, lvalue_pref, idx_ty)
// Commit the autoderef adjustments before returning the hit.
2293 autoderef.finalize(lvalue_pref, Some(base_expr));
2294 return Some(final_mt);
// `[T; N]` did not match directly: unsize to `[T]` and retry once
// (note `unsize: true` in this second call).
2297 if let ty::TyArray(element_ty, _) = adj_ty.sty {
2298 autoderef.finalize(lvalue_pref, Some(base_expr));
2299 let adjusted_ty = self.tcx.mk_slice(element_ty);
2300 return self.try_index_step(
2301 MethodCall::expr(expr.id), expr, base_expr,
2302 adjusted_ty, autoderefs, true, lvalue_pref, idx_ty);
2305 autoderef.unambiguous_final_ty();
2309 /// To type-check `base_expr[index_expr]`, we progressively autoderef
2310 /// (and otherwise adjust) `base_expr`, looking for a type which either
2311 /// supports builtin indexing or overloaded indexing.
2312 /// This loop implements one step in that search; the autoderef loop
2313 /// is implemented by `lookup_indexing`.
2314 fn try_index_step(&self,
2315 method_call: MethodCall,
2317 base_expr: &'gcx hir::Expr,
2318 adjusted_ty: Ty<'tcx>,
2321 lvalue_pref: LvaluePreference,
2323 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2326 debug!("try_index_step(expr={:?}, base_expr.id={:?}, adjusted_ty={:?}, \
2327 autoderefs={}, unsize={}, index_ty={:?})",
// Fresh type variable standing in for the `Index`/`IndexMut` input
// parameter; unified during trait method lookup below.
2335 let input_ty = self.next_ty_var();
2337 // First, try built-in indexing.
2338 match (adjusted_ty.builtin_index(), &index_ty.sty) {
2339 (Some(ty), &ty::TyUint(ast::UintTy::Us)) | (Some(ty), &ty::TyInfer(ty::IntVar(_))) => {
2340 debug!("try_index_step: success, using built-in indexing");
2341 // If we had `[T; N]`, we should've caught it before unsizing to `[T]`.
2343 self.write_autoderef_adjustment(base_expr.id, autoderefs);
2344 return Some((tcx.types.usize, ty));
2349 // Try `IndexMut` first, if preferred.
2350 let method = match (lvalue_pref, tcx.lang_items.index_mut_trait()) {
2351 (PreferMutLvalue, Some(trait_did)) => {
2352 self.lookup_method_in_trait_adjusted(expr.span,
2354 token::intern("index_mut"),
2359 Some(vec![input_ty]))
2364 // Otherwise, fall back to `Index`.
2365 let method = match (method, tcx.lang_items.index_trait()) {
2366 (None, Some(trait_did)) => {
2367 self.lookup_method_in_trait_adjusted(expr.span,
2369 token::intern("index"),
2374 Some(vec![input_ty]))
2376 (method, _) => method,
2379 // If some lookup succeeds, write callee into table and extract index/element
2380 // type from the method signature.
2381 // If some lookup succeeded, install method in table
2382 method.map(|method| {
2383 debug!("try_index_step: success, using overloaded indexing");
2384 self.tables.borrow_mut().method_map.insert(method_call, method);
2385 (input_ty, self.make_overloaded_lvalue_return_type(method).ty)
/// Checks the arguments of a method call against the method's fn type.
/// The receiver is excluded (`args_no_rcvr`, and `inputs[1..]` below);
/// if the method type already contains errors, the arguments are checked
/// against error placeholder types so checking can continue.
2389 fn check_method_argument_types(&self,
2391 method_fn_ty: Ty<'tcx>,
2392 callee_expr: &'gcx hir::Expr,
2393 args_no_rcvr: &'gcx [P<hir::Expr>],
2394 tuple_arguments: TupleArgumentsFlag,
2395 expected: Expectation<'tcx>)
2397 if method_fn_ty.references_error() {
2398 let err_inputs = self.err_args(args_no_rcvr.len());
// For tuple-style calls the arguments are packed into a single tuple.
2400 let err_inputs = match tuple_arguments {
2401 DontTupleArguments => err_inputs,
2402 TupleArguments => vec![self.tcx.mk_tup(err_inputs)],
2405 self.check_argument_types(sp, &err_inputs[..], &[], args_no_rcvr,
2406 false, tuple_arguments);
2409 match method_fn_ty.sty {
2410 ty::TyFnDef(_, _, ref fty) => {
2411 // HACK(eddyb) ignore self in the definition (see above).
2412 let expected_arg_tys = self.expected_types_for_fn_args(sp, expected,
2414 &fty.sig.0.inputs[1..]);
2415 self.check_argument_types(sp, &fty.sig.0.inputs[1..], &expected_arg_tys[..],
2416 args_no_rcvr, fty.sig.0.variadic, tuple_arguments);
2420 span_bug!(callee_expr.span, "method without bare fn type");
2426 /// Generic function that factors out common logic from function calls,
2427 /// method calls and overloaded operators.
///
/// Verifies argument count (reporting E0060 and friends on mismatch),
/// registers WF obligations for the formal input types, then checks each
/// argument expression against its formal/expected type. Arguments are
/// checked in two passes (non-closures first, then closures) so closure
/// arguments see as much inferred type information as possible.
2428 fn check_argument_types(&self,
2430 fn_inputs: &[Ty<'tcx>],
2431 expected_arg_tys: &[Ty<'tcx>],
2432 args: &'gcx [P<hir::Expr>],
2434 tuple_arguments: TupleArgumentsFlag) {
2437 // Grab the argument types, supplying fresh type variables
2438 // if the wrong number of arguments were supplied
2439 let supplied_arg_count = if tuple_arguments == DontTupleArguments {
2445 // All the input types from the fn signature must outlive the call
2446 // so as to validate implied bounds.
2447 for &fn_input_ty in fn_inputs {
2448 self.register_wf_obligation(fn_input_ty, sp, traits::MiscObligation);
2451 let mut expected_arg_tys = expected_arg_tys;
2452 let expected_arg_count = fn_inputs.len();
// Local helper: emits the "takes N parameters but M were supplied"
// diagnostic, labelling the span and listing the expected types.
2454 fn parameter_count_error<'tcx>(sess: &Session, sp: Span, fn_inputs: &[Ty<'tcx>],
2455 expected_count: usize, arg_count: usize, error_code: &str,
2457 let mut err = sess.struct_span_err_with_code(sp,
2458 &format!("this function takes {}{} parameter{} but {} parameter{} supplied",
2459 if variadic {"at least "} else {""},
2461 if expected_count == 1 {""} else {"s"},
2463 if arg_count == 1 {" was"} else {"s were"}),
2466 err.span_label(sp, &format!("expected {}{} parameter{}",
2467 if variadic {"at least "} else {""},
2469 if expected_count == 1 {""} else {"s"}));
2471 let input_types = fn_inputs.iter().map(|i| format!("{:?}", i)).collect::<Vec<String>>();
2472 if input_types.len() > 0 {
2473 err.note(&format!("the following parameter type{} expected: {}",
2474 if expected_count == 1 {" was"} else {"s were"},
2475 input_types.join(", ")));
// Resolve the formal argument types. For tuple-style calls the single
// formal input must be a tuple whose arity matches the argument count;
// otherwise compare counts directly (with slack for variadics).
2480 let formal_tys = if tuple_arguments == TupleArguments {
2481 let tuple_type = self.structurally_resolved_type(sp, fn_inputs[0]);
2482 match tuple_type.sty {
2483 ty::TyTuple(arg_types) if arg_types.len() != args.len() => {
2484 parameter_count_error(tcx.sess, sp, fn_inputs, arg_types.len(), args.len(),
2486 expected_arg_tys = &[];
2487 self.err_args(args.len())
2489 ty::TyTuple(arg_types) => {
2490 expected_arg_tys = match expected_arg_tys.get(0) {
2491 Some(&ty) => match ty.sty {
2492 ty::TyTuple(ref tys) => &tys,
2500 span_err!(tcx.sess, sp, E0059,
2501 "cannot use call notation; the first type parameter \
2502 for the function trait is neither a tuple nor unit");
2503 expected_arg_tys = &[];
2504 self.err_args(args.len())
2507 } else if expected_arg_count == supplied_arg_count {
2509 } else if variadic {
2510 if supplied_arg_count >= expected_arg_count {
2513 parameter_count_error(tcx.sess, sp, fn_inputs, expected_arg_count,
2514 supplied_arg_count, "E0060", true);
2515 expected_arg_tys = &[];
2516 self.err_args(supplied_arg_count)
2519 parameter_count_error(tcx.sess, sp, fn_inputs, expected_arg_count, supplied_arg_count,
2521 expected_arg_tys = &[];
2522 self.err_args(supplied_arg_count)
2525 debug!("check_argument_types: formal_tys={:?}",
2526 formal_tys.iter().map(|t| self.ty_to_string(*t)).collect::<Vec<String>>());
2528 // Check the arguments.
2529 // We do this in a pretty awful way: first we typecheck any arguments
2530 // that are not anonymous functions, then we typecheck the anonymous
2531 // functions. This is so that we have more information about the types
2532 // of arguments when we typecheck the functions. This isn't really the
2533 // right way to do this.
2534 let xs = [false, true];
2535 let mut any_diverges = false; // has any of the arguments diverged?
2536 let mut warned = false; // have we already warned about unreachable code?
2537 for check_blocks in &xs {
2538 let check_blocks = *check_blocks;
2539 debug!("check_blocks={}", check_blocks);
2541 // More awful hacks: before we check argument types, try to do
2542 // an "opportunistic" vtable resolution of any trait bounds on
2543 // the call. This helps coercions.
2545 self.select_obligations_where_possible();
2548 // For variadic functions, we don't have a declared type for all of
2549 // the arguments hence we only do our usual type checking with
2550 // the arguments who's types we do know.
2551 let t = if variadic {
2553 } else if tuple_arguments == TupleArguments {
2558 for (i, arg) in args.iter().take(t).enumerate() {
// Once any previous argument diverged, later arguments are
// unreachable; lint once and remember that we did.
2559 if any_diverges && !warned {
2562 .add_lint(lint::builtin::UNREACHABLE_CODE,
2565 "unreachable expression".to_string());
2568 let is_block = match arg.node {
2569 hir::ExprClosure(..) => true,
2573 if is_block == check_blocks {
2574 debug!("checking the argument");
2575 let formal_ty = formal_tys[i];
2577 // The special-cased logic below has three functions:
2578 // 1. Provide as good of an expected type as possible.
2579 let expected = expected_arg_tys.get(i).map(|&ty| {
2580 Expectation::rvalue_hint(self, ty)
2583 self.check_expr_with_expectation(&arg,
2584 expected.unwrap_or(ExpectHasType(formal_ty)));
2585 // 2. Coerce to the most detailed type that could be coerced
2586 // to, which is `expected_ty` if `rvalue_hint` returns an
2587 // `ExpectHasType(expected_ty)`, or the `formal_ty` otherwise.
2588 let coerce_ty = expected.and_then(|e| e.only_has_type(self));
2589 self.demand_coerce(&arg, coerce_ty.unwrap_or(formal_ty));
2591 // 3. Relate the expected type and the formal one,
2592 // if the expected type was used for the coercion.
2593 coerce_ty.map(|ty| self.demand_suptype(arg.span, formal_ty, ty));
2596 if let Some(&arg_ty) = self.tables.borrow().node_types.get(&arg.id) {
2597 // FIXME(canndrew): This is_never should probably be an is_uninhabited
2598 any_diverges = any_diverges ||
2599 self.type_var_diverges(arg_ty) ||
2603 if any_diverges && !warned {
2604 let parent = self.tcx.map.get_parent_node(args[0].id);
2607 .add_lint(lint::builtin::UNREACHABLE_CODE,
2610 "unreachable call".to_string());
2616 // We also need to make sure we at least write the ty of the other
2617 // arguments which we skipped above.
2619 for arg in args.iter().skip(expected_arg_count) {
2620 self.check_expr(&arg);
2622 // There are a few types which get autopromoted when passed via varargs
2623 // in C but we just error out instead and require explicit casts.
2624 let arg_ty = self.structurally_resolved_type(arg.span,
2625 self.expr_ty(&arg));
2627 ty::TyFloat(ast::FloatTy::F32) => {
2628 self.type_error_message(arg.span, |t| {
2629 format!("can't pass an `{}` to variadic \
2630 function, cast to `c_double`", t)
2633 ty::TyInt(ast::IntTy::I8) | ty::TyInt(ast::IntTy::I16) | ty::TyBool => {
2634 self.type_error_message(arg.span, |t| {
2635 format!("can't pass `{}` to variadic \
2636 function, cast to `c_int`",
2640 ty::TyUint(ast::UintTy::U8) | ty::TyUint(ast::UintTy::U16) => {
2641 self.type_error_message(arg.span, |t| {
2642 format!("can't pass `{}` to variadic \
2643 function, cast to `c_uint`",
// Bare fn items must be cast to a fn pointer before being passed
// through C varargs; suggest the pointer type explicitly.
2647 ty::TyFnDef(_, _, f) => {
2648 let ptr_ty = self.tcx.mk_fn_ptr(f);
2649 let ptr_ty = self.resolve_type_vars_if_possible(&ptr_ty);
2650 self.type_error_message(arg.span,
2652 format!("can't pass `{}` to variadic \
2653 function, cast to `{}`", t, ptr_ty)
/// Produces `len` copies of the error type, used as placeholder
/// argument types after an arity or signature error so that checking
/// of the argument expressions can still proceed.
2662 fn err_args(&self, len: usize) -> Vec<Ty<'tcx>> {
2663 (0..len).map(|_| self.tcx.types.err).collect()
/// Records the result type of a call expression in the typeck tables.
// NOTE(review): the parameter list is partially missing from this
// extract (an `output` parameter is used below but not visible).
2666 fn write_call(&self,
2667 call_expr: &hir::Expr,
2669 self.write_ty(call_expr.id, output);
2672 // AST fragment checking
// Computes the type of a literal. Unsuffixed integer/float literals
// consult the expectation to pick a concrete type where possible,
// otherwise fall back to a fresh int/float inference variable.
// NOTE(review): the `fn` header line is missing from this extract;
// only the parameter tail and body are visible.
2675 expected: Expectation<'tcx>)
2681 ast::LitKind::Str(..) => tcx.mk_static_str(),
2682 ast::LitKind::ByteStr(ref v) => {
// Byte-string literals are `&'static [u8; N]`.
2683 tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic),
2684 tcx.mk_array(tcx.types.u8, v.len()))
2686 ast::LitKind::Byte(_) => tcx.types.u8,
2687 ast::LitKind::Char(_) => tcx.types.char,
2688 ast::LitKind::Int(_, ast::LitIntType::Signed(t)) => tcx.mk_mach_int(t),
2689 ast::LitKind::Int(_, ast::LitIntType::Unsigned(t)) => tcx.mk_mach_uint(t),
2690 ast::LitKind::Int(_, ast::LitIntType::Unsuffixed) => {
2691 let opt_ty = expected.to_option(self).and_then(|ty| {
2693 ty::TyInt(_) | ty::TyUint(_) => Some(ty),
2694 ty::TyChar => Some(tcx.types.u8),
2695 ty::TyRawPtr(..) => Some(tcx.types.usize),
2696 ty::TyFnDef(..) | ty::TyFnPtr(_) => Some(tcx.types.usize),
2700 opt_ty.unwrap_or_else(
2701 || tcx.mk_int_var(self.next_int_var_id()))
2703 ast::LitKind::Float(_, t) => tcx.mk_mach_float(t),
2704 ast::LitKind::FloatUnsuffixed(_) => {
2705 let opt_ty = expected.to_option(self).and_then(|ty| {
2707 ty::TyFloat(_) => Some(ty),
2711 opt_ty.unwrap_or_else(
2712 || tcx.mk_float_var(self.next_float_var_id()))
2714 ast::LitKind::Bool(_) => tcx.types.bool
/// Checks `expr` with `expected` as a hint, then demands that the
/// resulting type is *equal* to `expected` (cf. `check_expr_has_type`,
/// which only demands a subtype).
2718 fn check_expr_eq_type(&self,
2719 expr: &'gcx hir::Expr,
2720 expected: Ty<'tcx>) {
2721 self.check_expr_with_hint(expr, expected);
2722 self.demand_eqtype(expr.span, expected, self.expr_ty(expr));
/// Checks `expr` with `expected` as a hint, then demands that the
/// resulting type is a *subtype* of `expected`.
2725 pub fn check_expr_has_type(&self,
2726 expr: &'gcx hir::Expr,
2727 expected: Ty<'tcx>) {
2728 self.check_expr_with_hint(expr, expected);
2729 self.demand_suptype(expr.span, expected, self.expr_ty(expr));
/// Checks `expr` with `expected` as a hint, then demands that the
/// resulting type is *coercible* to `expected` (the weakest of the
/// three `check_expr_*_type` demands).
2732 fn check_expr_coercable_to_type(&self,
2733 expr: &'gcx hir::Expr,
2734 expected: Ty<'tcx>) {
2735 self.check_expr_with_hint(expr, expected);
2736 self.demand_coerce(expr, expected);
/// Checks `expr` under an `ExpectHasType(expected)` expectation,
/// without demanding any relationship afterwards.
2739 fn check_expr_with_hint(&self, expr: &'gcx hir::Expr,
2740 expected: Ty<'tcx>) {
2741 self.check_expr_with_expectation(expr, ExpectHasType(expected))
/// Checks `expr` with the given expectation and no lvalue preference.
2744 fn check_expr_with_expectation(&self,
2745 expr: &'gcx hir::Expr,
2746 expected: Expectation<'tcx>) {
2747 self.check_expr_with_expectation_and_lvalue_pref(expr, expected, NoPreference)
/// Checks `expr` with no expectation at all.
2750 fn check_expr(&self, expr: &'gcx hir::Expr) {
2751 self.check_expr_with_expectation(expr, NoExpectation)
/// Checks `expr` with no expectation but with an explicit lvalue
/// preference (used for receivers of mutating operations).
2754 fn check_expr_with_lvalue_pref(&self, expr: &'gcx hir::Expr,
2755 lvalue_pref: LvaluePreference) {
2756 self.check_expr_with_expectation_and_lvalue_pref(expr, NoExpectation, lvalue_pref)
2759 // determine the `self` type, using fresh variables for all variables
2760 // declared on the impl declaration e.g., `impl<A,B> for Vec<(A,B)>`
2761 // would return ($0, $1) where $0 and $1 are freshly instantiated type
2763 pub fn impl_self_ty(&self,
2764 span: Span, // (potential) receiver for this impl
2766 -> TypeAndSubsts<'tcx> {
2767 let ity = self.tcx.lookup_item_type(did);
2768 debug!("impl_self_ty: ity={:?}", ity);
// Instantiate the impl's generics with fresh inference variables and
// substitute them into the impl's self type.
2770 let substs = self.fresh_substs_for_item(span, did);
2771 let substd_ty = self.instantiate_type_scheme(span, &substs, &ity.ty);
2773 TypeAndSubsts { substs: substs, ty: substd_ty }
2776 /// Unifies the return type with the expected type early, for more coercions
2777 /// and forward type information on the argument expressions.
///
/// Returns one resolved type per formal argument if the formal return
/// type could be subtyped against the expected return type, otherwise an
/// empty vector (no extra expectations for the arguments).
2778 fn expected_types_for_fn_args(&self,
2780 expected_ret: Expectation<'tcx>,
2781 formal_ret: Ty<'tcx>,
2782 formal_args: &[Ty<'tcx>])
2784 let expected_args = expected_ret.only_has_type(self).and_then(|ret_ty| {
// Region constraints from this speculative subtyping are rolled back
// unless the whole closure succeeds.
2785 self.commit_regions_if_ok(|| {
2786 // Attempt to apply a subtyping relationship between the formal
2787 // return type (likely containing type variables if the function
2788 // is polymorphic) and the expected return type.
2789 // No argument expectations are produced if unification fails.
2790 let origin = TypeOrigin::Misc(call_span);
2791 let ures = self.sub_types(false, origin, formal_ret, ret_ty);
2792 // FIXME(#15760) can't use try! here, FromError doesn't default
2793 // to identity so the resulting type is not constrained.
2795 // FIXME(#32730) propagate obligations
2796 Ok(InferOk { obligations, .. }) => assert!(obligations.is_empty()),
2797 Err(e) => return Err(e),
2800 // Record all the argument types, with the substitutions
2801 // produced from the above subtyping unification.
2802 Ok(formal_args.iter().map(|ty| {
2803 self.resolve_type_vars_if_possible(ty)
2806 }).unwrap_or(vec![]);
2807 debug!("expected_types_for_fn_args(formal={:?} -> {:?}, expected={:?} -> {:?})",
2808 formal_args, formal_ret,
2809 expected_args, expected_ret);
2813 // Checks a method call.
// Type-checks the receiver, resolves the method via `lookup_method`,
// records the resolution in the method map, then checks the remaining
// arguments and writes the call's result type. On lookup failure a
// method error is reported (unless the name is the error placeholder)
// and the expression is marked erroneous.
2814 fn check_method_call(&self,
2815 expr: &'gcx hir::Expr,
2816 method_name: Spanned<ast::Name>,
2817 args: &'gcx [P<hir::Expr>],
2819 expected: Expectation<'tcx>,
2820 lvalue_pref: LvaluePreference) {
// By convention the receiver is the first element of `args`.
2821 let rcvr = &args[0];
2822 self.check_expr_with_lvalue_pref(&rcvr, lvalue_pref);
2824 // no need to check for bot/err -- callee does that
2825 let expr_t = self.structurally_resolved_type(expr.span, self.expr_ty(&rcvr));
2827 let tps = tps.iter().map(|ast_ty| self.to_ty(&ast_ty)).collect::<Vec<_>>();
2828 let fn_ty = match self.lookup_method(method_name.span,
2835 let method_ty = method.ty;
2836 let method_call = MethodCall::expr(expr.id);
2837 self.tables.borrow_mut().method_map.insert(method_call, method);
// `Invalid` is the placeholder name produced after a parse/resolve
// error; suppress the duplicate diagnostic in that case.
2841 if method_name.node != keywords::Invalid.name() {
2842 self.report_method_error(method_name.span, expr_t,
2843 method_name.node, Some(rcvr), error);
2845 self.write_error(expr.id);
2850 // Call the generic checker.
2851 let ret_ty = self.check_method_argument_types(method_name.span, fn_ty,
2856 self.write_call(expr, ret_ty);
2859 // A generic function for checking the then and else in an if
// The condition is checked against `bool`; the two arms are then
// coerce-unified (LUB) when an else branch exists, or the then branch
// is equated with `()` when it does not. The resulting type is written
// for the whole `if` expression.
2861 fn check_then_else(&self,
2862 cond_expr: &'gcx hir::Expr,
2863 then_blk: &'gcx hir::Block,
2864 opt_else_expr: Option<&'gcx hir::Expr>,
2867 expected: Expectation<'tcx>) {
2868 self.check_expr_has_type(cond_expr, self.tcx.types.bool);
2870 let expected = expected.adjust_for_branches(self);
2871 self.check_block_with_expected(then_blk, expected);
2872 let then_ty = self.node_ty(then_blk.id);
2874 let unit = self.tcx.mk_nil();
2875 let (origin, expected, found, result) =
2876 if let Some(else_expr) = opt_else_expr {
2877 self.check_expr_with_expectation(else_expr, expected);
2878 let else_ty = self.expr_ty(else_expr);
2879 let origin = TypeOrigin::IfExpression(sp);
2881 // Only try to coerce-unify if we have a then expression
2882 // to assign coercions to, otherwise it's () or diverging.
2883 let result = if let Some(ref then) = then_blk.expr {
2884 let res = self.try_find_coercion_lub(origin, || Some(&**then),
2885 then_ty, else_expr);
2887 // In case we did perform an adjustment, we have to update
2888 // the type of the block, because old trans still uses it.
2889 let adj = self.tables.borrow().adjustments.get(&then.id).cloned();
2890 if res.is_ok() && adj.is_some() {
2891 self.write_ty(then_blk.id, self.adjust_expr_ty(then, adj.as_ref()));
// No tail expression in the then block: fall back to a plain LUB
// inside a transaction so failures leave no residue.
2896 self.commit_if_ok(|_| {
2897 let trace = TypeTrace::types(origin, true, then_ty, else_ty);
2898 self.lub(true, trace, &then_ty, &else_ty)
2899 .map(|InferOk { value, obligations }| {
2900 // FIXME(#32730) propagate obligations
2901 assert!(obligations.is_empty());
2906 (origin, then_ty, else_ty, result)
2908 let origin = TypeOrigin::IfExpressionWithNoElse(sp);
2909 (origin, unit, then_ty,
2910 self.eq_types(true, origin, unit, then_ty)
2911 .map(|InferOk { obligations, .. }| {
2912 // FIXME(#32730) propagate obligations
2913 assert!(obligations.is_empty());
2918 let if_ty = match result {
// Suppress the mismatch diagnostic when the condition itself already
// failed to check — the root cause was reported there.
2920 if self.expr_ty(cond_expr).references_error() {
2927 self.report_mismatched_types(origin, expected, found, e);
2932 self.write_ty(id, if_ty);
2935 // Check field access expressions
// Autoderefs the base looking for a struct with the named field. If an
// inaccessible (private) field is found it is remembered and reported
// after the loop; otherwise a tailored error is produced (method of the
// same name, raw-pointer deref hint, or a closest-field-name suggestion).
2936 fn check_field(&self,
2937 expr: &'gcx hir::Expr,
2938 lvalue_pref: LvaluePreference,
2939 base: &'gcx hir::Expr,
2940 field: &Spanned<ast::Name>) {
2941 self.check_expr_with_lvalue_pref(base, lvalue_pref);
2942 let expr_t = self.structurally_resolved_type(expr.span,
2943 self.expr_ty(base));
2944 let mut private_candidate = None;
2945 let mut autoderef = self.autoderef(expr.span, expr_t);
2946 while let Some((base_t, autoderefs)) = autoderef.next() {
2947 if let ty::TyStruct(base_def, substs) = base_t.sty {
2948 debug!("struct named {:?}", base_t);
2949 if let Some(field) = base_def.struct_variant().find_field_named(field.node) {
2950 let field_ty = self.field_ty(expr.span, field, substs);
2951 if field.vis.is_accessible_from(self.body_id, &self.tcx().map) {
2952 autoderef.finalize(lvalue_pref, Some(base));
2953 self.write_ty(expr.id, field_ty);
2954 self.write_autoderef_adjustment(base.id, autoderefs);
// Field exists but is private: keep it as a candidate for the
// privacy error emitted below.
2957 private_candidate = Some((base_def.did, field_ty));
2961 autoderef.unambiguous_final_ty();
2963 if let Some((did, field_ty)) = private_candidate {
2964 let struct_path = self.tcx().item_path_str(did);
// Still record the field's type so downstream checking can proceed.
2965 self.write_ty(expr.id, field_ty);
2966 let msg = format!("field `{}` of struct `{}` is private", field.node, struct_path);
2967 let mut err = self.tcx().sess.struct_span_err(expr.span, &msg);
2968 // Also check if an accessible method exists, which is often what is meant.
2969 if self.method_exists(field.span, field.node, expr_t, expr.id, false) {
2970 err.note(&format!("a method `{}` also exists, perhaps you wish to call it",
2974 } else if field.node == keywords::Invalid.name() {
2975 self.write_error(expr.id);
2976 } else if self.method_exists(field.span, field.node, expr_t, expr.id, true) {
2977 self.type_error_struct(field.span, |actual| {
2978 format!("attempted to take value of method `{}` on type \
2979 `{}`", field.node, actual)
2981 .help("maybe a `()` to call it is missing? \
2982 If not, try an anonymous function")
2984 self.write_error(expr.id);
2986 let mut err = self.type_error_struct(expr.span, |actual| {
2987 format!("attempted access of field `{}` on type `{}`, \
2988 but no field with that name was found",
2991 if let ty::TyRawPtr(..) = expr_t.sty {
2992 err.note(&format!("`{0}` is a native pointer; perhaps you need to deref with \
2993 `(*{0}).{1}`", pprust::expr_to_string(base), field.node));
2995 if let ty::TyStruct(def, _) = expr_t.sty {
2996 Self::suggest_field_names(&mut err, def.struct_variant(), field, vec![]);
2999 self.write_error(expr.id);
3003 // displays hints about the closest matches in field names
// `skip` lists field names to exclude from the suggestion (e.g. fields
// the user already wrote); private fields of non-local types are also
// excluded since the user cannot name them anyway.
3004 fn suggest_field_names(err: &mut DiagnosticBuilder,
3005 variant: ty::VariantDef<'tcx>,
3006 field: &Spanned<ast::Name>,
3007 skip : Vec<InternedString>) {
3008 let name = field.node.as_str();
3009 let names = variant.fields.iter().filter_map(|field| {
3010 // ignore already set fields and private fields from non-local crates
3011 if skip.iter().any(|x| *x == field.name.as_str()) ||
3012 (variant.did.krate != LOCAL_CRATE && field.vis != Visibility::Public) {
3019 // only find fits with at least one matching letter
3020 if let Some(name) = find_best_match_for_name(names, &name, Some(name.len())) {
3021 err.span_help(field.span,
3022 &format!("did you mean `{}`?", name));
3026 // Check tuple index expressions
// Like `check_field` but for positional access (`x.0`): autoderefs the
// base looking for a tuple or tuple struct with an element at `idx`.
// Private tuple-struct fields are remembered and reported after the
// loop; otherwise an out-of-bounds or not-a-tuple error is produced.
3027 fn check_tup_field(&self,
3028 expr: &'gcx hir::Expr,
3029 lvalue_pref: LvaluePreference,
3030 base: &'gcx hir::Expr,
3031 idx: codemap::Spanned<usize>) {
3032 self.check_expr_with_lvalue_pref(base, lvalue_pref);
3033 let expr_t = self.structurally_resolved_type(expr.span,
3034 self.expr_ty(base));
3035 let mut private_candidate = None;
// Tracks whether we ever saw a tuple-like type, to pick between the
// "out-of-bounds" and "not a tuple" error messages below.
3036 let mut tuple_like = false;
3037 let mut autoderef = self.autoderef(expr.span, expr_t);
3038 while let Some((base_t, autoderefs)) = autoderef.next() {
3039 let field = match base_t.sty {
3040 ty::TyStruct(base_def, substs) => {
3041 tuple_like = base_def.struct_variant().kind == ty::VariantKind::Tuple;
3042 if !tuple_like { continue }
3044 debug!("tuple struct named {:?}", base_t);
3045 base_def.struct_variant().fields.get(idx.node).and_then(|field| {
3046 let field_ty = self.field_ty(expr.span, field, substs);
// Provisionally a privacy-error candidate; cleared from
// consideration if the field turns out to be accessible.
3047 private_candidate = Some((base_def.did, field_ty));
3048 if field.vis.is_accessible_from(self.body_id, &self.tcx().map) {
3055 ty::TyTuple(ref v) => {
3057 v.get(idx.node).cloned()
3062 if let Some(field_ty) = field {
3063 autoderef.finalize(lvalue_pref, Some(base));
3064 self.write_ty(expr.id, field_ty);
3065 self.write_autoderef_adjustment(base.id, autoderefs);
3069 autoderef.unambiguous_final_ty();
3071 if let Some((did, field_ty)) = private_candidate {
3072 let struct_path = self.tcx().item_path_str(did);
3073 let msg = format!("field `{}` of struct `{}` is private", idx.node, struct_path);
3074 self.tcx().sess.span_err(expr.span, &msg);
// Still record the field's type so downstream checking can proceed.
3075 self.write_ty(expr.id, field_ty);
3079 self.type_error_message(
3083 format!("attempted out-of-bounds tuple index `{}` on \
3088 format!("attempted tuple index `{}` on type `{}`, but the \
3089 type was not a tuple or tuple struct",
3096 self.write_error(expr.id);
/// Reports E0559/E0560 for a struct-literal field that does not exist
/// on the target variant, and suggests the closest valid field name
/// (excluding fields the user already wrote).
3099 fn report_unknown_field(&self,
3101 variant: ty::VariantDef<'tcx>,
3103 skip_fields: &[hir::Field]) {
3104 let mut err = self.type_error_struct_with_diag(
// Enum struct-variants and plain structs get distinct error codes.
3106 |actual| if let ty::TyEnum(..) = ty.sty {
3107 struct_span_err!(self.tcx.sess, field.name.span, E0559,
3108 "struct variant `{}::{}` has no field named `{}`",
3109 actual, variant.name.as_str(), field.name.node)
3111 struct_span_err!(self.tcx.sess, field.name.span, E0560,
3112 "structure `{}` has no field named `{}`",
3113 actual, field.name.node)
3116 // prevent all specified fields from being suggested
3117 let skip_fields = skip_fields.iter().map(|ref x| x.name.node.as_str());
3118 Self::suggest_field_names(&mut err, variant, &field.name, skip_fields.collect());
/// Type-checks the fields of a struct/enum-variant literal: each written
/// field is checked against its declared type, duplicate fields get E0062
/// with a pointer to the first use, unknown fields are reported, and —
/// when `check_completeness` holds (no `..base` present) — missing fields
/// produce E0063.
3122 fn check_expr_struct_fields(&self,
3125 variant: ty::VariantDef<'tcx>,
3126 ast_fields: &'gcx [hir::Field],
3127 check_completeness: bool) {
3129 let substs = match adt_ty.sty {
3130 ty::TyStruct(_, substs) | ty::TyEnum(_, substs) => substs,
3131 _ => span_bug!(span, "non-ADT passed to check_expr_struct_fields")
// `remaining_fields` starts as all declared fields; each written field
// removes its entry, so what is left at the end is what is missing.
3134 let mut remaining_fields = FnvHashMap();
3135 for field in &variant.fields {
3136 remaining_fields.insert(field.name, field);
// Maps each field name to the span of its first use, for the
// "first use of `…`" label on duplicate-field errors.
3139 let mut seen_fields = FnvHashMap();
3141 let mut error_happened = false;
3143 // Typecheck each field.
3144 for field in ast_fields {
3145 let expected_field_type;
3147 if let Some(v_field) = remaining_fields.remove(&field.name.node) {
3148 expected_field_type = self.field_ty(field.span, v_field, substs);
3150 seen_fields.insert(field.name.node, field.span);
3152 error_happened = true;
3153 expected_field_type = tcx.types.err;
// Name exists on the variant but was already consumed above,
// so this is a duplicate rather than an unknown field.
3154 if let Some(_) = variant.find_field_named(field.name.node) {
3155 let mut err = struct_span_err!(self.tcx.sess,
3158 "field `{}` specified more than once",
3161 err.span_label(field.name.span, &format!("used more than once"));
3163 if let Some(prev_span) = seen_fields.get(&field.name.node) {
3164 err.span_label(*prev_span, &format!("first use of `{}`", field.name.node));
3169 self.report_unknown_field(adt_ty, variant, field, ast_fields);
3173 // Make sure to give a type to the field even if there's
3174 // an error, so we can continue typechecking
3175 self.check_expr_coercable_to_type(&field.expr, expected_field_type);
3178 // Make sure the programmer specified all the fields.
3179 if check_completeness &&
3181 !remaining_fields.is_empty()
3183 span_err!(tcx.sess, span, E0063,
3184 "missing field{} {} in initializer of `{}`",
3185 if remaining_fields.len() == 1 {""} else {"s"},
3186 remaining_fields.keys()
3187 .map(|n| format!("`{}`", n))
3188 .collect::<Vec<_>>()
/// Error-recovery path for a struct literal whose path failed to
/// resolve: marks the expression as erroneous but still type-checks
/// every field expression (and the `..base`, if any) so that node
/// types are recorded and later phases do not ICE.
3195 fn check_struct_fields_on_error(&self,
3197 fields: &'gcx [hir::Field],
3198 base_expr: &'gcx Option<P<hir::Expr>>) {
3199 // Make sure to still write the types
3200 // otherwise we might ICE
3201 self.write_error(id);
3202 for field in fields {
3203 self.check_expr(&field.expr);
3206 Some(ref base) => self.check_expr(&base),
/// Resolves a struct-literal path to its variant definition and type.
/// Handles plain structs, enum variants, and type aliases that resolve
/// to a struct; returns `None` (after emitting E0071 or relying on an
/// earlier resolution error) when the path does not name a struct-like
/// thing. Tuple structs/variants additionally require the `relaxed_adts`
/// feature gate here.
3211 pub fn check_struct_path(&self,
3213 node_id: ast::NodeId,
3215 -> Option<(ty::VariantDef<'tcx>, Ty<'tcx>)> {
3216 let def = self.finish_resolving_struct_path(path, node_id, span);
3217 let variant = match def {
// Resolution already failed and reported; just taint and bail.
3219 self.set_tainted_by_errors();
3222 Def::Variant(type_did, _) | Def::Struct(type_did) => {
3223 Some((type_did, self.tcx.expect_variant_def(def)))
3225 Def::TyAlias(did) => {
// A type alias only works here if it ultimately names a struct.
3226 if let Some(&ty::TyStruct(adt, _)) = self.tcx.opt_lookup_item_type(did)
3227 .map(|scheme| &scheme.ty.sty) {
3228 Some((did, adt.struct_variant()))
3236 if let Some((def_id, variant)) = variant {
3237 if variant.kind == ty::VariantKind::Tuple &&
3238 !self.tcx.sess.features.borrow().relaxed_adts {
3239 emit_feature_err(&self.tcx.sess.parse_sess.span_diagnostic,
3240 "relaxed_adts", span, GateIssue::Language,
3241 "tuple structs and variants in struct patterns are unstable");
3243 let ty = self.instantiate_type_path(def_id, path, node_id);
3246 struct_span_err!(self.tcx.sess, path.span, E0071,
3247 "`{}` does not name a struct or a struct variant",
3248 pprust::path_to_string(path))
3249 .span_label(path.span, &format!("not a struct"))
// Type-check a struct expression `S { f1: e1, .., base }`:
// resolve the path to a variant, check the field initializers, and —
// when functional-record-update (`..base`) syntax is used — check the
// base against the struct type and record the types of all fields for
// later use (the `fru_field_types` table). Non-struct bases get E0436.
3255 fn check_expr_struct(&self,
3258 fields: &'gcx [hir::Field],
3259 base_expr: &'gcx Option<P<hir::Expr>>)
3261 // Find the relevant variant
3262 let (variant, expr_ty) = if let Some(variant_ty) = self.check_struct_path(path, expr.id,
// Path didn't name a struct: fall back to the error-recovery walk so
// every sub-expression still gets a type.
3266 self.check_struct_fields_on_error(expr.id, fields, base_expr);
// Completeness of the field list is only required when there is no
// `..base` (hence `base_expr.is_none()`).
3270 self.check_expr_struct_fields(expr_ty, path.span, variant, fields,
3271 base_expr.is_none());
3272 if let &Some(ref base_expr) = base_expr {
// The FRU base must have exactly the struct's type.
3273 self.check_expr_has_type(base_expr, expr_ty);
3275 ty::TyStruct(adt, substs) => {
// Record each field's (normalized) type so borrowck/trans can
// later reason about the fields taken from the base.
3276 self.tables.borrow_mut().fru_field_types.insert(
3278 adt.struct_variant().fields.iter().map(|f| {
3279 self.normalize_associated_types_in(
3280 expr.span, &f.ty(self.tcx, substs)
// `..base` on anything but a struct type is an error.
3286 span_err!(self.tcx.sess, base_expr.span, E0436,
3287 "functional record update syntax requires a struct");
3295 /// If an expression has any sub-expressions that result in a type error,
3296 /// inspecting that expression's type with `ty.references_error()` will return
3297 /// true. Likewise, if an expression is known to diverge, inspecting its
3298 /// type with `ty::type_is_bot` will return true (n.b.: since Rust is
3299 /// strict, _|_ can appear in the type of an expression that does not,
3300 /// itself, diverge: for example, fn() -> _|_.)
3301 /// Note that inspecting a type's structure *directly* may expose the fact
3302 /// that there are actually multiple representations for `TyError`, so avoid
3303 /// that when err needs to be handled differently.
3304 fn check_expr_with_expectation_and_lvalue_pref(&self,
3305 expr: &'gcx hir::Expr,
3306 expected: Expectation<'tcx>,
3307 lvalue_pref: LvaluePreference) {
// Main dispatch of expression type checking: one arm per `hir::Expr`
// kind, each recording a result via write_ty/write_error/write_never.
// NOTE(review): the `match expr.node` head and the bindings for `id`
// and `tcx` are on elided lines — presumably `expr.id` and `self.tcx`;
// confirm against the full source.
3308 debug!(">> typechecking: expr={:?} expected={:?}",
// `box e`: propagate the expected pointee type inward as an rvalue hint.
3314 hir::ExprBox(ref subexpr) => {
3315 let expected_inner = expected.to_option(self).map_or(NoExpectation, |ty| {
3317 ty::TyBox(ty) => Expectation::rvalue_hint(self, ty),
3321 self.check_expr_with_expectation(subexpr, expected_inner);
3322 let referent_ty = self.expr_ty(&subexpr);
3323 self.write_ty(id, tcx.mk_box(referent_ty));
// Literals.
3326 hir::ExprLit(ref lit) => {
3327 let typ = self.check_lit(&lit, expected);
3328 self.write_ty(id, typ);
// Binary ops and compound assignment delegate to the binop checker.
3330 hir::ExprBinary(op, ref lhs, ref rhs) => {
3331 self.check_binop(expr, op, lhs, rhs);
3333 hir::ExprAssignOp(op, ref lhs, ref rhs) => {
3334 self.check_binop_assign(expr, op, lhs, rhs);
// Unary ops: `*` tries a builtin deref then an overloaded `Deref`;
// `!`/`-` use builtin semantics for primitives and lang-item traits
// (`Not`/`Neg`) otherwise.
3336 hir::ExprUnary(unop, ref oprnd) => {
3337 let expected_inner = match unop {
3338 hir::UnNot | hir::UnNeg => {
// Only `*` propagates the caller's lvalue preference to the operand.
3345 let lvalue_pref = match unop {
3346 hir::UnDeref => lvalue_pref,
3349 self.check_expr_with_expectation_and_lvalue_pref(&oprnd,
3352 let mut oprnd_t = self.expr_ty(&oprnd);
3354 if !oprnd_t.references_error() {
3357 oprnd_t = self.structurally_resolved_type(expr.span, oprnd_t);
3359 if let Some(mt) = oprnd_t.builtin_deref(true, NoPreference) {
3361 } else if let Some(method) = self.try_overloaded_deref(
3362 expr.span, Some(&oprnd), oprnd_t, lvalue_pref) {
// Overloaded deref: record the method in the method map so later
// passes know this `*` is a call.
3363 oprnd_t = self.make_overloaded_lvalue_return_type(method).ty;
3364 self.tables.borrow_mut().method_map.insert(MethodCall::expr(expr.id),
3367 self.type_error_message(expr.span, |actual| {
3368 format!("type `{}` cannot be \
3369 dereferenced", actual)
3371 oprnd_t = tcx.types.err;
// `!`: builtin on integers and bool; otherwise the `Not` lang item.
3375 oprnd_t = self.structurally_resolved_type(oprnd.span,
3377 if !(oprnd_t.is_integral() || oprnd_t.sty == ty::TyBool) {
3378 oprnd_t = self.check_user_unop("!", "not",
3379 tcx.lang_items.not_trait(),
3380 expr, &oprnd, oprnd_t, unop);
// `-`: builtin on integers and floats; otherwise the `Neg` lang item.
3384 oprnd_t = self.structurally_resolved_type(oprnd.span,
3386 if !(oprnd_t.is_integral() || oprnd_t.is_fp()) {
3387 oprnd_t = self.check_user_unop("-", "neg",
3388 tcx.lang_items.neg_trait(),
3389 expr, &oprnd, oprnd_t, unop);
3394 self.write_ty(id, oprnd_t);
// `&e` / `&mut e`: derive a pointee expectation from the expected
// reference/pointer type, then wrap the operand type in a fresh region.
3396 hir::ExprAddrOf(mutbl, ref oprnd) => {
3397 let hint = expected.only_has_type(self).map_or(NoExpectation, |ty| {
3399 ty::TyRef(_, ref mt) | ty::TyRawPtr(ref mt) => {
3400 if self.tcx.expr_is_lval(&oprnd) {
3401 // Lvalues may legitimately have unsized types.
3402 // For example, dereferences of a fat pointer and
3403 // the last field of a struct can be unsized.
3404 ExpectHasType(mt.ty)
3406 Expectation::rvalue_hint(self, mt.ty)
3412 let lvalue_pref = LvaluePreference::from_mutbl(mutbl);
3413 self.check_expr_with_expectation_and_lvalue_pref(&oprnd, hint, lvalue_pref);
3415 let tm = ty::TypeAndMut { ty: self.expr_ty(&oprnd), mutbl: mutbl };
3416 let oprnd_t = if tm.ty.references_error() {
3419 // Note: at this point, we cannot say what the best lifetime
3420 // is to use for resulting pointer.  We want to use the
3421 // shortest lifetime possible so as to avoid spurious borrowck
3422 // errors.  Moreover, the longest lifetime will depend on the
3423 // precise details of the value whose address is being taken
3424 // (and how long it is valid), which we don't know yet until type
3425 // inference is complete.
3427 // Therefore, here we simply generate a region variable.  The
3428 // region inferencer will then select the ultimate value.
3429 // Finally, borrowck is charged with guaranteeing that the
3430 // value whose address was taken can actually be made to live
3431 // as long as it needs to live.
3432 let region = self.next_region_var(infer::AddrOfRegion(expr.span));
3433 tcx.mk_ref(region, tm)
3435 self.write_ty(id, oprnd_t);
// Path expressions: resolve (possibly qualified `<T>::item`) and
// instantiate the value path; on resolution failure, taint and error.
3437 hir::ExprPath(ref opt_qself, ref path) => {
3438 let opt_self_ty = opt_qself.as_ref().map(|qself| self.to_ty(&qself.ty));
3439 let (def, opt_ty, segments) = self.resolve_ty_and_def_ufcs(opt_self_ty, path,
3440 expr.id, expr.span);
3441 if def != Def::Err {
3442 self.instantiate_value_path(segments, opt_ty, def, expr.span, id);
3444 self.set_tainted_by_errors();
3445 self.write_error(id);
3448 // We always require that the type provided as the value for
3449 // a type parameter outlives the moment of instantiation.
3450 self.opt_node_ty_substs(expr.id, |item_substs| {
3451 self.add_wf_bounds(&item_substs.substs, expr);
// Inline asm: just walk the operand expressions.
3454 hir::ExprInlineAsm(_, ref outputs, ref inputs) => {
3455 for output in outputs {
3456 self.check_expr(output);
3458 for input in inputs {
3459 self.check_expr(input);
// `break`/`continue`/`return` diverge, hence write_never.
3463 hir::ExprBreak(_) => { self.write_never(id); }
3464 hir::ExprAgain(_) => { self.write_never(id); }
3465 hir::ExprRet(ref expr_opt) => {
3466 if let Some(ref e) = *expr_opt {
3467 self.check_expr_coercable_to_type(&e, self.ret_ty);
// Bare `return;` requires the function's return type to equal `()`;
// otherwise emit E0069.
3469 let eq_result = self.eq_types(false,
3470 TypeOrigin::Misc(expr.span),
3473 // FIXME(#32730) propagate obligations
3474 .map(|InferOk { obligations, .. }| assert!(obligations.is_empty()));
3475 if eq_result.is_err() {
3476 struct_span_err!(tcx.sess, expr.span, E0069,
3477 "`return;` in a function whose return type is not `()`")
3478 .span_label(expr.span, &format!("return type is not ()"))
3482 self.write_never(id);
// Assignment: LHS must be an lvalue (else E0070) and Sized; RHS must
// coerce to the LHS type.
3484 hir::ExprAssign(ref lhs, ref rhs) => {
3485 self.check_expr_with_lvalue_pref(&lhs, PreferMutLvalue);
3488 if !tcx.expr_is_lval(&lhs) {
3490 tcx.sess, expr.span, E0070,
3491 "invalid left-hand side expression")
3494 &format!("left-hand of expression not valid"))
3498 let lhs_ty = self.expr_ty(&lhs);
3499 self.check_expr_coercable_to_type(&rhs, lhs_ty);
3500 let rhs_ty = self.expr_ty(&rhs);
3502 self.require_expr_have_sized_type(&lhs, traits::AssignmentLhsSized);
3504 if lhs_ty.references_error() || rhs_ty.references_error() {
3505 self.write_error(id);
// Control-flow expressions delegate to dedicated checkers.
3510 hir::ExprIf(ref cond, ref then_blk, ref opt_else_expr) => {
3511 self.check_then_else(&cond, &then_blk, opt_else_expr.as_ref().map(|e| &**e),
3512 id, expr.span, expected);
3514 hir::ExprWhile(ref cond, ref body, _) => {
3515 self.check_expr_has_type(&cond, tcx.types.bool);
3516 self.check_block_no_value(&body);
3517 let cond_ty = self.expr_ty(&cond);
3518 let body_ty = self.node_ty(body.id);
3519 if cond_ty.references_error() || body_ty.references_error() {
3520 self.write_error(id);
// `loop` without a reachable `break` diverges.
3526 hir::ExprLoop(ref body, _) => {
3527 self.check_block_no_value(&body);
3528 if !may_break(tcx, expr.id, &body) {
3529 self.write_never(id);
3534 hir::ExprMatch(ref discrim, ref arms, match_src) => {
3535 self.check_match(expr, &discrim, arms, expected, match_src);
3537 hir::ExprClosure(capture, ref decl, ref body, _) => {
3538 self.check_expr_closure(expr, capture, &decl, &body, expected);
3540 hir::ExprBlock(ref b) => {
3541 self.check_block_with_expected(&b, expected);
3542 self.write_ty(id, self.node_ty(b.id));
// Calls: also register a well-formedness obligation on the return type.
3544 hir::ExprCall(ref callee, ref args) => {
3545 self.check_call(expr, &callee, &args[..], expected);
3547 // we must check that return type of called functions is WF:
3548 let ret_ty = self.expr_ty(expr);
3549 self.register_wf_obligation(ret_ty, expr.span, traits::MiscObligation);
3551 hir::ExprMethodCall(name, ref tps, ref args) => {
3552 self.check_method_call(expr, name, &args[..], &tps[..], expected, lvalue_pref);
// If any argument's type is an error, taint the whole call.
3553 let arg_tys = args.iter().map(|a| self.expr_ty(&a));
3554 let args_err = arg_tys.fold(false, |rest_err, a| rest_err || a.references_error());
3556 self.write_error(id);
// `e as T`: eagerly write the cast type, defer the legality check of
// the cast itself until type checking is done.
3559 hir::ExprCast(ref e, ref t) => {
// Array-length expressions in the target type are usize.
3560 if let hir::TyFixedLengthVec(_, ref count_expr) = t.node {
3561 self.check_expr_with_hint(&count_expr, tcx.types.usize);
3564 // Find the type of `e`. Supply hints based on the type we are casting to,
3566 let t_cast = self.to_ty(t);
3567 let t_cast = self.resolve_type_vars_if_possible(&t_cast);
3568 self.check_expr_with_expectation(e, ExpectCastableToType(t_cast));
3569 let t_expr = self.expr_ty(e);
3570 let t_cast = self.resolve_type_vars_if_possible(&t_cast);
3572 // Eagerly check for some obvious errors.
3573 if t_expr.references_error() || t_cast.references_error() {
3574 self.write_error(id);
3576 // Write a type for the whole expression, assuming everything is going
3578 self.write_ty(id, t_cast);
3580 // Defer other checks until we're done type checking.
3581 let mut deferred_cast_checks = self.deferred_cast_checks.borrow_mut();
3582 match cast::CastCheck::new(self, e, t_expr, t_cast, t.span, expr.span) {
3584 deferred_cast_checks.push(cast_check);
3586 Err(ErrorReported) => {
3587 self.write_error(id);
// Type-ascription expression `e: T`.
3592 hir::ExprType(ref e, ref t) => {
3593 let typ = self.to_ty(&t);
3594 self.check_expr_eq_type(&e, typ);
3595 self.write_ty(id, typ);
// Array literal `[a, b, c]`: unify element types by LUB coercion,
// special-casing the first element (no previous elements to LUB with).
3597 hir::ExprVec(ref args) => {
3598 let uty = expected.to_option(self).and_then(|uty| {
3600 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3605 let mut unified = self.next_ty_var();
3606 let coerce_to = uty.unwrap_or(unified);
3608 for (i, e) in args.iter().enumerate() {
3609 self.check_expr_with_hint(e, coerce_to);
3610 let e_ty = self.expr_ty(e);
3611 let origin = TypeOrigin::Misc(e.span);
3613 // Special-case the first element, as it has no "previous expressions".
3614 let result = if i == 0 {
3615 self.try_coerce(e, coerce_to)
3617 let prev_elems = || args[..i].iter().map(|e| &**e);
3618 self.try_find_coercion_lub(origin, prev_elems, unified, e)
3622 Ok(ty) => unified = ty,
3624 self.report_mismatched_types(origin, unified, e_ty, e);
3628 self.write_ty(id, tcx.mk_array(unified, args.len()));
// Repeat expression `[elem; n]`: count is a usize constant; the
// element must be Copy when n > 1 (RepeatVec obligation).
3630 hir::ExprRepeat(ref element, ref count_expr) => {
3631 self.check_expr_has_type(&count_expr, tcx.types.usize);
3632 let count = eval_length(self.tcx.global_tcx(), &count_expr, "repeat count")
3635 let uty = match expected {
3636 ExpectHasType(uty) => {
3638 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3645 let (element_ty, t) = match uty {
3647 self.check_expr_coercable_to_type(&element, uty);
3651 let t: Ty = self.next_ty_var();
3652 self.check_expr_has_type(&element, t);
3653 (self.expr_ty(&element), t)
3658 // For [foo, ..n] where n > 1, `foo` must have
3660 self.require_type_meets(t, expr.span, traits::RepeatVec, ty::BoundCopy);
3663 if element_ty.references_error() {
3664 self.write_error(id);
3666 let t = tcx.mk_array(t, count);
3667 self.write_ty(id, t);
// Tuple literal: check each element against the expected tuple's
// corresponding field type, if one is expected.
3670 hir::ExprTup(ref elts) => {
3671 let flds = expected.only_has_type(self).and_then(|ty| {
3673 ty::TyTuple(ref flds) => Some(&flds[..]),
3677 let mut err_field = false;
3679 let elt_ts = elts.iter().enumerate().map(|(i, e)| {
3680 let t = match flds {
3681 Some(ref fs) if i < fs.len() => {
3683 self.check_expr_coercable_to_type(&e, ety);
3687 self.check_expr_with_expectation(&e, NoExpectation);
3691 err_field = err_field || t.references_error();
3695 self.write_error(id);
3697 let typ = tcx.mk_tup(elt_ts);
3698 self.write_ty(id, typ);
// Struct literal; the initializer as a whole must be Sized.
3701 hir::ExprStruct(ref path, ref fields, ref base_expr) => {
3702 self.check_expr_struct(expr, path, fields, base_expr);
3704 self.require_expr_have_sized_type(expr, traits::StructInitializerSized);
// Field and tuple-field access.
3706 hir::ExprField(ref base, ref field) => {
3707 self.check_field(expr, lvalue_pref, &base, field);
3709 hir::ExprTupField(ref base, idx) => {
3710 self.check_tup_field(expr, lvalue_pref, &base, idx);
// Indexing `base[idx]`: try builtin/overloaded indexing; on failure,
// give tuple-indexing advice (`tuple.0`) when the base is a tuple.
3712 hir::ExprIndex(ref base, ref idx) => {
3713 self.check_expr_with_lvalue_pref(&base, lvalue_pref);
3714 self.check_expr(&idx);
3716 let base_t = self.expr_ty(&base);
3717 let idx_t = self.expr_ty(&idx);
3719 if base_t.references_error() {
3720 self.write_ty(id, base_t);
3721 } else if idx_t.references_error() {
3722 self.write_ty(id, idx_t);
3724 let base_t = self.structurally_resolved_type(expr.span, base_t);
3725 match self.lookup_indexing(expr, base, base_t, idx_t, lvalue_pref) {
3726 Some((index_ty, element_ty)) => {
3727 let idx_expr_ty = self.expr_ty(idx);
3728 self.demand_eqtype(expr.span, index_ty, idx_expr_ty);
3729 self.write_ty(id, element_ty);
3732 self.check_expr_has_type(&idx, self.tcx.types.err);
3733 let mut err = self.type_error_struct(
3736 format!("cannot index a value of type `{}`",
3740 // Try to give some advice about indexing tuples.
3741 if let ty::TyTuple(_) = base_t.sty {
3742 let mut needs_note = true;
3743 // If the index is an integer, we can show the actual
3744 // fixed expression:
3745 if let hir::ExprLit(ref lit) = idx.node {
3746 if let ast::LitKind::Int(i,
3747 ast::LitIntType::Unsuffixed) = lit.node {
3748 let snip = tcx.sess.codemap().span_to_snippet(base.span);
3749 if let Ok(snip) = snip {
3750 err.span_suggestion(expr.span,
3751 "to access tuple elements, \
3752 use tuple indexing syntax \
3754 format!("{}.{}", snip, i));
3760 err.help("to access tuple elements, use tuple indexing \
3761 syntax (e.g. `tuple.0`)");
3765 self.write_ty(id, self.tcx().types.err);
3772 debug!("type of expr({}) {} is...", expr.id,
3773 pprust::expr_to_string(expr));
3774 debug!("... {:?}, expected is {:?}",
3779 // Finish resolving a path in a struct expression or pattern `S::A { .. }` if necessary.
3780 // The newly resolved definition is written into `def_map`.
3781 pub fn finish_resolving_struct_path(&self,
3783 node_id: ast::NodeId,
// Look up what resolve already determined for this node. A depth of 0
// means the whole path was resolved during name resolution.
3787 let path_res = self.tcx().expect_resolution(node_id);
3788 if path_res.depth == 0 {
3789 // If fully resolved already, we don't have to do anything.
// Partially resolved: the first `len - depth` segments name a base
// type; resolve the remaining (associated) segments via astconv.
3792 let base_ty_end = path.segments.len() - path_res.depth;
3793 let (_ty, def) = AstConv::finish_resolving_def_to_ty(self, self, span,
3794 PathParamMode::Optional,
3798 &path.segments[..base_ty_end],
3799 &path.segments[base_ty_end..]);
3800 // Write back the new resolution.
3801 self.tcx().def_map.borrow_mut().insert(node_id, PathResolution::new(def));
3806 // Resolve associated value path into a base type and associated constant or method definition.
3807 // The newly resolved definition is written into `def_map`.
// Returns the resolved `Def`, the self type (if any), and the path
// segments that still carry type parameters for the value itself.
3808 pub fn resolve_ty_and_def_ufcs<'b>(&self,
3809 opt_self_ty: Option<Ty<'tcx>>,
3810 path: &'b hir::Path,
3811 node_id: ast::NodeId,
3813 -> (Def, Option<Ty<'tcx>>, &'b [hir::PathSegment])
3815 let path_res = self.tcx().expect_resolution(node_id);
3816 if path_res.depth == 0 {
3817 // If fully resolved already, we don't have to do anything.
3818 (path_res.base_def, opt_self_ty, &path.segments)
3820 // Try to resolve everything except for the last segment as a type.
3821 let ty_segments = path.segments.split_last().unwrap().1;
3822 let base_ty_end = path.segments.len() - path_res.depth;
3823 let (ty, _def) = AstConv::finish_resolving_def_to_ty(self, self, span,
3824 PathParamMode::Optional,
3828 &ty_segments[..base_ty_end],
3829 &ty_segments[base_ty_end..]);
3831 // Resolve an associated constant or method on the previously resolved type.
3832 let item_segment = path.segments.last().unwrap();
3833 let item_name = item_segment.name;
3834 let def = match self.resolve_ufcs(span, item_name, ty, node_id) {
// Lookup failed: a private match is still usable as a resolution
// (the privacy error is reported elsewhere); other errors are
// reported here unless the name is the dummy `Invalid` keyword.
3837 let def = match error {
3838 method::MethodError::PrivateMatch(def) => def,
3841 if item_name != keywords::Invalid.name() {
3842 self.report_method_error(span, ty, item_name, None, error);
3848 // Write back the new resolution.
3849 self.tcx().def_map.borrow_mut().insert(node_id, PathResolution::new(def));
// Only the final segment carries parameters for the associated item.
3850 (def, Some(ty), slice::ref_slice(item_segment))
// Type-check the initializer of a `let` binding against the local's
// declared/inferred type. When the pattern contains `ref` bindings,
// the initializer type must be *equal* to the local type (no coercion);
// otherwise a normal coercion check is performed.
3854 pub fn check_decl_initializer(&self,
3855 local: &'gcx hir::Local,
3856 init: &'gcx hir::Expr)
3858 let ref_bindings = self.tcx.pat_contains_ref_binding(&local.pat);
3860 let local_ty = self.local_ty(init.span, local.id);
3861 if let Some(m) = ref_bindings {
3862 // Somewhat subtle: if we have a `ref` binding in the pattern,
3863 // we want to avoid introducing coercions for the RHS. This is
3864 // both because it helps preserve sanity and, in the case of
3865 // ref mut, for soundness (issue #23116). In particular, in
3866 // the latter case, we need to be clear that the type of the
3867 // referent for the reference that results is *equal to* the
3868 // type of the lvalue it is referencing, and not some
3869 // supertype thereof.
3870 self.check_expr_with_lvalue_pref(init, LvaluePreference::from_mutbl(m));
3871 let init_ty = self.expr_ty(init);
3872 self.demand_eqtype(init.span, init_ty, local_ty);
// No ref bindings: an ordinary coercion to the local's type suffices.
3874 self.check_expr_coercable_to_type(init, local_ty)
// Type-check a full `let` declaration: record the local's type, check
// the initializer (if any), then check the pattern against that type.
// Errors in either the initializer or the pattern taint the local's
// recorded type with the error type.
3878 pub fn check_decl_local(&self, local: &'gcx hir::Local) {
3879 let t = self.local_ty(local.span, local.id);
3880 self.write_ty(local.id, t);
3882 if let Some(ref init) = local.init {
3883 self.check_decl_initializer(local, &init);
3884 let init_ty = self.expr_ty(&init);
3885 if init_ty.references_error() {
3886 self.write_ty(local.id, init_ty);
3890 self.check_pat(&local.pat, t);
3891 let pat_ty = self.node_ty(local.pat.id);
3892 if pat_ty.references_error() {
3893 self.write_ty(local.id, pat_ty);
// Type-check a single statement, tracking whether any component
// diverges (`saw_bot`) or has an error type (`saw_err`), and record a
// diverging type variable, the error type, or `()` for the statement
// node accordingly.
// NOTE(review): `node_id` is bound on an elided line — presumably the
// statement's node id; confirm against the full source.
3897 pub fn check_stmt(&self, stmt: &'gcx hir::Stmt) {
3899 let mut saw_bot = false;
3900 let mut saw_err = false;
3902 hir::StmtDecl(ref decl, id) => {
3905 hir::DeclLocal(ref l) => {
3906 self.check_decl_local(&l);
3907 let l_t = self.node_ty(l.id);
3908 saw_bot = saw_bot || self.type_var_diverges(l_t);
3909 saw_err = saw_err || l_t.references_error();
3911 hir::DeclItem(_) => {/* ignore for now */ }
// Expression statement (no trailing semicolon): must have type `()`.
3914 hir::StmtExpr(ref expr, id) => {
3916 // Check with expected type of ()
3917 self.check_expr_has_type(&expr, self.tcx.mk_nil());
3918 let expr_ty = self.expr_ty(&expr);
3919 saw_bot = saw_bot || self.type_var_diverges(expr_ty);
3920 saw_err = saw_err || expr_ty.references_error();
// Semicolon statement: any type is allowed; the value is dropped.
3922 hir::StmtSemi(ref expr, id) => {
3924 self.check_expr(&expr);
3925 let expr_ty = self.expr_ty(&expr);
3926 saw_bot |= self.type_var_diverges(expr_ty);
3927 saw_err |= expr_ty.references_error();
// Record the statement's own type: diverging var > error > `()`.
3931 self.write_ty(node_id, self.next_diverging_ty_var());
3934 self.write_error(node_id);
3937 self.write_nil(node_id)
// Check a block in statement position: the block's value is expected
// to be `()`. Error types are propagated; otherwise `()` is demanded
// as a supertype of the block's actual type.
3941 pub fn check_block_no_value(&self, blk: &'gcx hir::Block) {
3942 self.check_block_with_expected(blk, ExpectHasType(self.tcx.mk_nil()));
3943 let blkty = self.node_ty(blk.id);
3944 if blkty.references_error() {
3945 self.write_error(blk.id);
3947 let nilty = self.tcx.mk_nil();
3948 self.demand_suptype(blk.span, nilty, blkty);
// Check all statements of a block plus its optional tail expression,
// propagating the expectation to the tail. Tracks divergence to emit
// (at most one) UNREACHABLE_CODE lint per block, and tracks errors so
// the block's recorded type is error/diverging/`()`/tail type as
// appropriate. Also pushes and pops the block's unsafety state.
3952 fn check_block_with_expected(&self,
3953 blk: &'gcx hir::Block,
3954 expected: Expectation<'tcx>) {
// Enter this block's unsafety context, remembering the previous one
// (restored at the end via `prev`, bound on an elided line).
3956 let mut fcx_ps = self.ps.borrow_mut();
3957 let unsafety_state = fcx_ps.recurse(blk);
3958 replace(&mut *fcx_ps, unsafety_state)
3961 let mut warned = false;
3962 let mut any_diverges = false;
3963 let mut any_err = false;
3964 for s in &blk.stmts {
3966 let s_id = s.node.id();
3967 let s_ty = self.node_ty(s_id);
// Statements after a diverging one are unreachable; lint once.
3968 if any_diverges && !warned && match s.node {
3969 hir::StmtDecl(ref decl, _) => {
3971 hir::DeclLocal(_) => true,
3975 hir::StmtExpr(_, _) | hir::StmtSemi(_, _) => true,
3979 .add_lint(lint::builtin::UNREACHABLE_CODE,
3982 "unreachable statement".to_string());
3985 // FIXME(canndrew): This is_never should probably be an is_uninhabited
3986 any_diverges = any_diverges ||
3987 self.type_var_diverges(s_ty) ||
3989 any_err = any_err || s_ty.references_error();
// No tail expression: block evaluates to error/diverging/`()`.
3992 None => if any_err {
3993 self.write_error(blk.id);
3994 } else if any_diverges {
3995 self.write_ty(blk.id, self.next_diverging_ty_var());
3997 self.write_nil(blk.id);
// Tail expression present but statements already diverged:
// the tail itself is unreachable.
4000 if any_diverges && !warned {
4003 .add_lint(lint::builtin::UNREACHABLE_CODE,
4006 "unreachable expression".to_string());
// Check the tail with the block's expectation; only a hard
// `ExpectHasType` triggers a coercion check.
4008 let ety = match expected {
4009 ExpectHasType(ety) => {
4010 self.check_expr_coercable_to_type(&e, ety);
4014 self.check_expr_with_expectation(&e, expected);
4020 self.write_error(blk.id);
4021 } else if any_diverges {
4022 self.write_ty(blk.id, self.next_diverging_ty_var());
4024 self.write_ty(blk.id, ety);
// Restore the enclosing unsafety state.
4029 *self.ps.borrow_mut() = prev;
4032 // Instantiates the given path, which must refer to an item with the given
4033 // number of type parameters and type.
// Given a resolved value path (fn, const, static, variant ctor, method,
// associated const, local, ...), figure out which path segments carry
// type/lifetime parameters, build the full `Substs` (filling omitted
// parameters with defaults or inference variables), register the item's
// bounds as obligations, and record the substituted type for `node_id`.
4034 pub fn instantiate_value_path(&self,
4035 segments: &[hir::PathSegment],
4036 opt_self_ty: Option<Ty<'tcx>>,
4039 node_id: ast::NodeId)
4041 debug!("instantiate_value_path(path={:?}, def={:?}, node_id={})",
4046 // We need to extract the type parameters supplied by the user in
4047 // the path `path`. Due to the current setup, this is a bit of a
4048 // tricky-process; the problem is that resolve only tells us the
4049 // end-point of the path resolution, and not the intermediate steps.
4050 // Luckily, we can (at least for now) deduce the intermediate steps
4051 // just from the end-point.
4053 // There are basically four cases to consider:
4055 // 1. Reference to a *type*, such as a struct or enum:
4057 //        mod a { struct Foo<T> { ... } }
4059 //    Because we don't allow types to be declared within one
4060 //    another, a path that leads to a type will always look like
4061 //    `a::b::Foo<T>` where `a` and `b` are modules. This implies
4062 //    that only the final segment can have type parameters, and
4063 //    they are located in the TypeSpace.
4065 //    *Note:* Generally speaking, references to types don't
4066 //    actually pass through this function, but rather the
4067 //    `ast_ty_to_ty` function in `astconv`. However, in the case
4068 //    of struct patterns (and maybe literals) we do invoke
4069 //    `instantiate_value_path` to get the general type of an instance of
4070 //    a struct. (In these cases, there are actually no type
4071 //    parameters permitted at present, but perhaps we will allow
4072 //    them in the future.)
4074 // 1b. Reference to an enum variant or tuple-like struct:
4076 //        struct foo<T>(...)
4077 //        enum E<T> { foo(...) }
4079 //    In these cases, the parameters are declared in the type
4082 // 2. Reference to a *fn item*:
4086 //    In this case, the path will again always have the form
4087 //    `a::b::foo::<T>` where only the final segment should have
4088 //    type parameters. However, in this case, those parameters are
4089 //    declared on a value, and hence are in the `FnSpace`.
4091 // 3. Reference to a *method*:
4093 //        impl<A> SomeStruct<A> {
4097 //    Here we can have a path like
4098 //    `a::b::SomeStruct::<A>::foo::<B>`, in which case parameters
4099 //    may appear in two places. The penultimate segment,
4100 //    `SomeStruct::<A>`, contains parameters in TypeSpace, and the
4101 //    final segment, `foo::<B>` contains parameters in fn space.
4103 // 4. Reference to an *associated const*:
4105 // impl<A> AnotherStruct<A> {
4106 // const FOO: B = BAR;
4109 //    The path in this case will look like
4110 //    `a::b::AnotherStruct::<A>::FOO`, so the penultimate segment
4111 //    only will have parameters in TypeSpace.
4113 // The first step then is to categorize the segments appropriately.
4115 assert!(!segments.is_empty());
4117 let mut ufcs_associated = None;
4118 let mut type_segment = None;
4119 let mut fn_segment = None;
4121 // Case 1 and 1b. Reference to a *type* or *enum variant*.
4122 Def::Struct(def_id) |
4123 Def::Variant(_, def_id) |
4125 Def::TyAlias(def_id) |
4126 Def::AssociatedTy(_, def_id) |
4127 Def::Trait(def_id) => {
4128 // Everything but the final segment should have no
4129 // parameters at all.
4130 let mut generics = self.tcx.lookup_generics(def_id);
4131 if let Some(def_id) = generics.parent {
4132 // Variant and struct constructors use the
4133 // generics of their parent type definition.
4134 generics = self.tcx.lookup_generics(def_id);
4136 type_segment = Some((segments.last().unwrap(), generics));
4139 // Case 2. Reference to a top-level value.
4141 Def::Const(def_id) |
4142 Def::Static(def_id, _) => {
4143 fn_segment = Some((segments.last().unwrap(),
4144 self.tcx.lookup_generics(def_id)));
4147 // Case 3. Reference to a method or associated const.
4148 Def::Method(def_id) |
4149 Def::AssociatedConst(def_id) => {
4150 let container = self.tcx.impl_or_trait_item(def_id).container();
// Methods from traits are only callable when the trait is object- /
// method-call-safe; check that here for trait containers.
4152 ty::TraitContainer(trait_did) => {
4153 callee::check_legal_trait_for_method_call(self.ccx, span, trait_did)
4155 ty::ImplContainer(_) => {}
4158 let generics = self.tcx.lookup_generics(def_id);
4159 if segments.len() >= 2 {
// Fully-written path `Type::<..>::method::<..>`: the penultimate
// segment carries the parent type's parameters.
4160 let parent_generics = self.tcx.lookup_generics(generics.parent.unwrap());
4161 type_segment = Some((&segments[segments.len() - 2], parent_generics));
4163 // `<T>::assoc` will end up here, and so can `T::assoc`.
4164 let self_ty = opt_self_ty.expect("UFCS sugared assoc missing Self");
4165 ufcs_associated = Some((container, self_ty));
4167 fn_segment = Some((segments.last().unwrap(), generics));
4170 // Other cases. Various nonsense that really shouldn't show up
4171 // here. If they do, an error will have been reported
4172 // elsewhere. (I hope)
4174 Def::ForeignMod(..) |
4184 // In `<T as Trait<A, B>>::method`, `A` and `B` are mandatory, but
4185 // `opt_self_ty` can also be Some for `Foo::method`, where Foo's
4186 // type parameters are not mandatory.
4187 let require_type_space = opt_self_ty.is_some() && ufcs_associated.is_none();
4189 debug!("type_segment={:?} fn_segment={:?}", type_segment, fn_segment);
4191 // Now that we have categorized what space the parameters for each
4192 // segment belong to, let's sort out the parameters that the user
4193 // provided (if any) into their appropriate spaces. We'll also report
4194 // errors if type parameters are provided in an inappropriate place.
4195 let poly_segments = type_segment.is_some() as usize +
4196 fn_segment.is_some() as usize;
4197 self.tcx.prohibit_type_params(&segments[..segments.len() - poly_segments]);
// Locals and upvars carry no substs at all: just record the
// (normalized) local type with empty substitutions and return.
4200 Def::Local(_, nid) | Def::Upvar(_, nid, _, _) => {
4201 let ty = self.local_ty(span, nid);
4202 let ty = self.normalize_associated_types_in(span, &ty);
4203 self.write_ty(node_id, ty);
4204 self.write_substs(node_id, ty::ItemSubsts {
4205 substs: Substs::empty(self.tcx)
4212 // Now we have to compare the types that the user *actually*
4213 // provided against the types that were *expected*. If the user
4214 // did not provide any types, then we want to substitute inference
4215 // variables. If the user provided some types, we may still need
4216 // to add defaults. If the user provided *too many* types, that's
4218 self.check_path_parameter_count(span, !require_type_space, &mut type_segment);
4219 self.check_path_parameter_count(span, true, &mut fn_segment);
// Compute where the fn-space parameters begin in the flat parameter
// index space, and whether a `Self` parameter exists.
4221 let (fn_start, has_self) = match (type_segment, fn_segment) {
4222 (_, Some((_, generics))) => {
4223 (generics.parent_count(), generics.has_self)
4225 (Some((_, generics)), None) => {
4226 (generics.own_count(), generics.has_self)
4228 (None, None) => (0, false)
// Build the substitutions: for each generic parameter, take the
// user-written lifetime/type if present, else default/inference var.
4230 let substs = Substs::for_item(self.tcx, def.def_id(), |def, _| {
4231 let mut i = def.index as usize;
4233 let segment = if i < fn_start {
4234 i -= has_self as usize;
4240 let lifetimes = match segment.map(|(s, _)| &s.parameters) {
4241 Some(&hir::AngleBracketedParameters(ref data)) => &data.lifetimes[..],
4242 Some(&hir::ParenthesizedParameters(_)) => bug!(),
4246 if let Some(ast_lifetime) = lifetimes.get(i) {
4247 ast_region_to_region(self.tcx, ast_lifetime)
4249 self.region_var_for_def(span, def)
// Type-parameter closure of `Substs::for_item`.
4252 let mut i = def.index as usize;
4254 let can_omit = i >= fn_start || !require_type_space;
4255 let segment = if i < fn_start {
4256 // Handle Self first, so we can adjust the index to match the AST.
4257 if has_self && i == 0 {
4258 return opt_self_ty.unwrap_or_else(|| {
4259 self.type_var_for_def(span, def, substs)
4262 i -= has_self as usize;
4268 let types = match segment.map(|(s, _)| &s.parameters) {
4269 Some(&hir::AngleBracketedParameters(ref data)) => &data.types[..],
4270 Some(&hir::ParenthesizedParameters(_)) => bug!(),
4274 // Skip over the lifetimes in the same segment.
4275 if let Some((_, generics)) = segment {
4276 i -= generics.regions.len();
4279 let omitted = can_omit && types.is_empty();
4280 if let Some(ast_ty) = types.get(i) {
4281 // A provided type parameter.
4283 } else if let (false, Some(default)) = (omitted, def.default) {
4284 // No type parameter provided, but a default exists.
4285 default.subst_spanned(self.tcx, substs, Some(span))
4287 // No type parameters were provided, we can infer all.
4288 // This can also be reached in some error cases:
4289 // We prefer to use inference variables instead of
4290 // TyError to let type inference recover somewhat.
4291 self.type_var_for_def(span, def, substs)
4295 // The things we are substituting into the type should not contain
4296 // escaping late-bound regions, and nor should the base type scheme.
4297 let scheme = self.tcx.lookup_item_type(def.def_id());
4298 let type_predicates = self.tcx.lookup_predicates(def.def_id());
4299 assert!(!substs.has_escaping_regions());
4300 assert!(!scheme.ty.has_escaping_regions());
4302 // Add all the obligations that are required, substituting and
4303 // normalized appropriately.
4304 let bounds = self.instantiate_bounds(span, &substs, &type_predicates);
4305 self.add_obligations_for_parameters(
4306 traits::ObligationCause::new(span, self.body_id, traits::ItemObligation(def.def_id())),
4309 // Substitute the values for the type parameters into the type of
4310 // the referenced item.
4311 let ty_substituted = self.instantiate_type_scheme(span, &substs, &scheme.ty);
4314 if let Some((ty::ImplContainer(impl_def_id), self_ty)) = ufcs_associated {
4315 // In the case of `Foo<T>::method` and `<Foo<T>>::method`, if `method`
4316 // is inherent, there is no `Self` parameter, instead, the impl needs
4317 // type parameters, which we can infer by unifying the provided `Self`
4318 // with the substituted impl type.
4319 let impl_scheme = self.tcx.lookup_item_type(impl_def_id);
4321 let impl_ty = self.instantiate_type_scheme(span, &substs, &impl_scheme.ty);
4322 match self.sub_types(false, TypeOrigin::Misc(span), self_ty, impl_ty) {
4323 Ok(InferOk { obligations, .. }) => {
4324 // FIXME(#32730) propagate obligations
4325 assert!(obligations.is_empty());
// Subtype relation held at probe time but fails now — an ICE
// (bug!) path, judging from the message below.
4329 "instantiate_value_path: (UFCS) {:?} was a subtype of {:?} but now is not?",
4336 debug!("instantiate_value_path: type of {:?} is {:?}",
// Finally record the fully substituted type and substs for the node.
4339 self.write_ty(node_id, ty_substituted);
4340 self.write_substs(node_id, ty::ItemSubsts {
4346 /// Report errors if the provided parameters are too few or too many.
// Validates user-written lifetime parameters (E0088 too many / E0090
// too few), type parameters (E0087 too many / E0089 too few, with
// defaults counting as optional), and rejects associated-type bindings
// in expression paths (E0182). On E0087, `segment` is reset (elided
// line) so instantiation falls back to inference variables.
4347 fn check_path_parameter_count(&self,
4350 segment: &mut Option<(&hir::PathSegment, &ty::Generics)>) {
// Pull apart the angle-bracketed parameter lists, if any.
4351 let (lifetimes, types, bindings) = match segment.map(|(s, _)| &s.parameters) {
4352 Some(&hir::AngleBracketedParameters(ref data)) => {
4353 (&data.lifetimes[..], &data.types[..], &data.bindings[..])
4355 Some(&hir::ParenthesizedParameters(_)) => {
4356 span_bug!(span, "parenthesized parameters cannot appear in ExprPath");
4358 None => (&[][..], &[][..], &[][..])
// Helper for pluralizing "N parameter(s)" in diagnostics.
4362 format!("{} parameter{}", n, if n == 1 { "" } else { "s" })
4365 // Check provided lifetime parameters.
4366 let lifetime_defs = segment.map_or(&[][..], |(_, generics)| &generics.regions);
4367 if lifetimes.len() > lifetime_defs.len() {
4368 let span = lifetimes[lifetime_defs.len()].span;
4369 span_err!(self.tcx.sess, span, E0088,
4370 "too many lifetime parameters provided: \
4371 expected {}, found {}",
4372 count(lifetime_defs.len()),
4373 count(lifetimes.len()));
// Zero lifetimes means "elide them all" and is fine; a nonzero but
// short list is E0090.
4374 } else if lifetimes.len() > 0 && lifetimes.len() < lifetime_defs.len() {
4375 span_err!(self.tcx.sess, span, E0090,
4376 "too few lifetime parameters provided: \
4377 expected {}, found {}",
4378 count(lifetime_defs.len()),
4379 count(lifetimes.len()));
4382 // Check provided type parameters.
// For the item's own segment, skip the implicit `Self` parameter.
4383 let type_defs = segment.map_or(&[][..], |(_, generics)| {
4384 if generics.parent.is_none() {
4385 &generics.types[generics.has_self as usize..]
// Parameters without defaults are the required minimum.
4390 let required_len = type_defs.iter()
4391 .take_while(|d| d.default.is_none())
4393 if types.len() > type_defs.len() {
4394 let span = types[type_defs.len()].span;
4395 struct_span_err!(self.tcx.sess, span, E0087,
4396 "too many type parameters provided: \
4397 expected at most {}, found {}",
4398 count(type_defs.len()),
4400 .span_label(span, &format!("too many type parameters")).emit();
4402 // To prevent derived errors to accumulate due to extra
4403 // type parameters, we force instantiate_value_path to
4404 // use inference variables instead of the provided types.
4406 } else if !(can_omit && types.len() == 0) && types.len() < required_len {
4408 if type_defs.len() != required_len { "at least " } else { "" };
4409 span_err!(self.tcx.sess, span, E0089,
4410 "too few type parameters provided: \
4411 expected {}{}, found {}",
4413 count(required_len),
4414 count(types.len()));
// Associated-type bindings (`Item = T`) are only legal in type paths.
4417 if !bindings.is_empty() {
4418 span_err!(self.tcx.sess, bindings[0].span, E0182,
4419 "unexpected binding of associated item in expression path \
4420 (only allowed in type paths)");
// Resolve `ty` as far as current inference allows; if it is still an
// unresolved type variable, consult the fallback `f` for an alternative.
// If the fallback also fails to produce a concrete type, report "type must
// be known in this context" (unless errors were already reported) and
// poison the variable with `err` to suppress derived errors downstream.
// NOTE(review): interior lines (the "still a variable?" guard and closing
// braces) are elided in this view.
4424 fn structurally_resolve_type_or_else<F>(&self, sp: Span, ty: Ty<'tcx>, f: F)
4426 where F: Fn() -> Ty<'tcx>
4428 let mut ty = self.resolve_type_vars_with_obligations(ty);
// Fallback path: ask the caller-supplied closure for a concrete type.
4431 let alternative = f();
// The fallback didn't help either: emit the ambiguity error once.
4434 if alternative.is_ty_var() || alternative.references_error() {
4435 if !self.is_tainted_by_errors() {
4436 self.type_error_message(sp, |_actual| {
4437 "the type of this value must be known in this context".to_string()
// Unify the variable with `err` so later uses don't re-report.
4440 self.demand_suptype(sp, self.tcx.types.err, ty);
4441 ty = self.tcx.types.err;
// Fallback produced a usable type: constrain the variable to it.
4443 self.demand_suptype(sp, alternative, ty);
4451 // Resolves `typ` by a single level if `typ` is a type variable. If no
4452 // resolution is possible, then an error is reported.
// Convenience wrapper over `structurally_resolve_type_or_else` with a
// fallback that cannot supply a better type (its body is elided here),
// so unresolved variables always produce the ambiguity error.
4453 pub fn structurally_resolved_type(&self, sp: Span, ty: Ty<'tcx>) -> Ty<'tcx> {
4454 self.structurally_resolve_type_or_else(sp, ty, || {
4460 // Returns true if b contains a break that can exit from b
//
// Two cases are checked (combined on an elided line, visibly by `(..) ||`
// style grouping): an unlabeled `break` directly inside the loop body, or
// a labeled `break` anywhere in `b` whose label resolves to node `id`.
4461 pub fn may_break(tcx: TyCtxt, id: ast::NodeId, b: &hir::Block) -> bool {
4462 // First: is there an unlabeled break immediately
4464 (loop_query(&b, |e| {
4466 hir::ExprBreak(None) => true,
4470 // Second: is there a labeled break with label
4471 // <id> nested anywhere inside the loop?
4472 (block_query(b, |e| {
// Only a break whose label resolves to this exact loop counts; labeled
// breaks targeting enclosing loops do not exit `b`.
4473 if let hir::ExprBreak(Some(_)) = e.node {
4474 tcx.expect_def(e.id) == Def::Label(id)
// Emit E0091 for every declared type parameter that does not appear
// anywhere in `ty`. Walks all component types of `ty`, marking each
// `TyParam` it finds as used, then reports the unmarked declarations.
// NOTE(review): the tail of this function (`.emit()` and closing braces)
// is elided in this view.
4481 pub fn check_bounds_are_used<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
4482 generics: &hir::Generics,
4484 debug!("check_bounds_are_used(n_tps={}, ty={:?})",
4485 generics.ty_params.len(), ty);
4487 // make a vector of booleans initially false, set to true when used
// Fast path: nothing to check when the item declares no type parameters.
4488 if generics.ty_params.is_empty() { return; }
4489 let mut tps_used = vec![false; generics.ty_params.len()];
4491 for leaf_ty in ty.walk() {
4492 if let ty::TyParam(ParamTy {idx, ..}) = leaf_ty.sty {
4493 debug!("Found use of ty param num {}", idx);
// Parameter indices count lifetimes first, so subtract the lifetime
// count to index into the type-parameter-only `tps_used` vector.
4494 tps_used[idx as usize - generics.lifetimes.len()] = true;
// Report each declared-but-never-used type parameter at its own span.
4498 for (&used, param) in tps_used.iter().zip(&generics.ty_params) {
4500 struct_span_err!(ccx.tcx.sess, param.span, E0091,
4501 "type parameter `{}` is unused",
4503 .span_label(param.span, &format!("unused type parameter"))