1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
15 Within the check phase of type check, we check each item one at a time
16 (bodies of function expressions are checked as part of the containing
17 function). Inference is used to supply types wherever they are
20 By far the most complex case is checking the body of a function. This
21 can be broken down into several distinct phases:
23 - gather: creates type variables to represent the type of each local
24 variable and pattern binding.
26 - main: the main pass does the lion's share of the work: it
27 determines the types of all expressions, resolves
28 methods, checks for most invalid conditions, and so forth. In
29 some cases, where a type is unknown, it may create a type or region
30 variable and use that as the type of an expression.
32 In the process of checking, various constraints will be placed on
33 these type variables through the subtyping relationships requested
34 through the `demand` module. The `infer` module is in charge
35 of resolving those constraints.
37 - regionck: after main is complete, the regionck pass goes over all
38 types looking for regions and making sure that they did not escape
39 into places they are not in scope. This may also influence the
40 final assignments of the various region variables if there is some
43 - vtable: find and records the impls to use for each trait bound that
44 appears on a type parameter.
46 - writeback: writes the final types within a function body, replacing
47 type variables with their final inferred types. These final types
48 are written into the `tcx.node_types` table, which should *never* contain
49 any reference to a type variable.
53 While type checking a function, the intermediate types for the
54 expressions, blocks, and so forth contained within the function are
55 stored in `fcx.node_types` and `fcx.item_substs`. These types
56 may contain unresolved type variables. After type checking is
57 complete, the functions in the writeback module are used to take the
58 types from this table, resolve them, and then write them into their
59 permanent home in the type context `ccx.tcx`.
61 This means that during inferencing you should use `fcx.write_ty()`
62 and `fcx.expr_ty()` / `fcx.node_ty()` to write/obtain the types of
63 nodes within the function.
65 The types of top-level items, which never contain unbound type
66 variables, are stored directly into the `tcx` tables.
68 n.b.: A type variable is not the same thing as a type parameter. A
69 type variable is rather an "instance" of a type parameter: that is,
70 given a generic function `fn foo<T>(t: T)`: while checking the
71 function `foo`, the type `ty_param(0)` refers to the type `T`, which
72 is treated in abstract. When `foo()` is called, however, `T` will be
73 substituted for a fresh type variable `N`. This variable will
74 eventually be resolved to some concrete type (which might itself be
79 pub use self::Expectation::*;
80 pub use self::compare_method::{compare_impl_method, compare_const_impl};
81 use self::TupleArgumentsFlag::*;
83 use astconv::{AstConv, ast_region_to_region, PathParamMode};
84 use dep_graph::DepNode;
85 use fmt_macros::{Parser, Piece, Position};
86 use middle::cstore::LOCAL_CRATE;
87 use hir::def::{Def, PathResolution};
88 use hir::def_id::DefId;
90 use rustc::infer::{self, InferCtxt, InferOk, TypeOrigin, TypeTrace, type_variable};
91 use rustc::ty::subst::{Subst, Substs};
92 use rustc::traits::{self, Reveal};
93 use rustc::ty::{ParamTy, ParameterEnvironment};
94 use rustc::ty::{LvaluePreference, NoPreference, PreferMutLvalue};
95 use rustc::ty::{self, ToPolyTraitRef, Ty, TyCtxt, Visibility};
96 use rustc::ty::{MethodCall, MethodCallee};
97 use rustc::ty::adjustment;
98 use rustc::ty::fold::{BottomUpFolder, TypeFoldable};
99 use rustc::ty::util::{Representability, IntTypeExt};
100 use require_c_abi_if_variadic;
101 use rscope::{ElisionFailureInfo, RegionScope};
102 use session::{Session, CompileResult};
106 use util::common::{block_query, ErrorReported, indenter, loop_query};
107 use util::nodemap::{DefIdMap, FnvHashMap, FnvHashSet, NodeMap};
109 use std::cell::{Cell, Ref, RefCell};
110 use std::mem::replace;
112 use syntax::abi::Abi;
115 use syntax::codemap::{self, Spanned};
116 use syntax::feature_gate::{GateIssue, emit_feature_err};
117 use syntax::parse::token::{self, InternedString, keywords};
119 use syntax::util::lev_distance::find_best_match_for_name;
120 use syntax_pos::{self, Span};
121 use errors::DiagnosticBuilder;
123 use rustc::hir::intravisit::{self, Visitor};
124 use rustc::hir::{self, PatKind};
125 use rustc::hir::print as pprust;
126 use rustc_back::slice;
127 use rustc_const_eval::eval_length;
147 /// closures defined within the function. For example:
150 /// bar(move|| { ... })
153 /// Here, the function `foo()` and the closure passed to
154 /// `bar()` will each have their own `FnCtxt`, but they will
155 /// share the inherited fields.
156 pub struct Inherited<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
157 ccx: &'a CrateCtxt<'a, 'gcx>,
158 infcx: InferCtxt<'a, 'gcx, 'tcx>,
159 locals: RefCell<NodeMap<Ty<'tcx>>>,
161 fulfillment_cx: RefCell<traits::FulfillmentContext<'tcx>>,
163 // When we process a call like `c()` where `c` is a closure type,
164 // we may not have decided yet whether `c` is a `Fn`, `FnMut`, or
165 // `FnOnce` closure. In that case, we defer full resolution of the
166 // call until upvar inference can kick in and make the
167 // decision. We keep these deferred resolutions grouped by the
168 // def-id of the closure, so that once we decide, we can easily go
169 // back and process them.
170 deferred_call_resolutions: RefCell<DefIdMap<Vec<DeferredCallResolutionHandler<'gcx, 'tcx>>>>,
172 deferred_cast_checks: RefCell<Vec<cast::CastCheck<'tcx>>>,
174 // Anonymized types found in explicit return types and their
175 // associated fresh inference variable. Writeback resolves these
176 // variables to get the concrete type, which can be used to
177 // deanonymize TyAnon, after typeck is done with all functions.
178 anon_types: RefCell<DefIdMap<Ty<'tcx>>>,
180 // Obligations which will have to be checked at the end of
181 // type-checking, after all functions have been inferred.
182 deferred_obligations: RefCell<Vec<traits::DeferredObligation<'tcx>>>,
185 impl<'a, 'gcx, 'tcx> Deref for Inherited<'a, 'gcx, 'tcx> {
186 type Target = InferCtxt<'a, 'gcx, 'tcx>;
187 fn deref(&self) -> &Self::Target {
192 trait DeferredCallResolution<'gcx, 'tcx> {
193 fn resolve<'a>(&mut self, fcx: &FnCtxt<'a, 'gcx, 'tcx>);
196 type DeferredCallResolutionHandler<'gcx, 'tcx> = Box<DeferredCallResolution<'gcx, 'tcx>+'tcx>;
198 /// When type-checking an expression, we propagate downward
199 /// whatever type hint we are able in the form of an `Expectation`.
200 #[derive(Copy, Clone, Debug)]
201 pub enum Expectation<'tcx> {
202 /// We know nothing about what type this expression should have.
205 /// This expression should have the type given (or some subtype)
206 ExpectHasType(Ty<'tcx>),
208 /// This expression will be cast to the `Ty`
209 ExpectCastableToType(Ty<'tcx>),
211 /// This rvalue expression will be wrapped in `&` or `Box` and coerced
212 /// to `&Ty` or `Box<Ty>`, respectively. `Ty` is `[A]` or `Trait`.
213 ExpectRvalueLikeUnsized(Ty<'tcx>),
216 impl<'a, 'gcx, 'tcx> Expectation<'tcx> {
217 // Disregard "castable to" expectations because they
218 // can lead us astray. Consider for example `if cond
219 // {22} else {c} as u8` -- if we propagate the
220 // "castable to u8" constraint to 22, it will pick the
221 // type 22u8, which is overly constrained (c might not
222 // be a u8). In effect, the problem is that the
223 // "castable to" expectation is not the tightest thing
224 // we can say, so we want to drop it in this case.
225 // The tightest thing we can say is "must unify with
226 // else branch". Note that in the case of a "has type"
227 // constraint, this limitation does not hold.
229 // If the expected type is just a type variable, then don't use
230 // an expected type. Otherwise, we might write parts of the type
231 // when checking the 'then' block which are incompatible with the
233 fn adjust_for_branches(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Expectation<'tcx> {
235 ExpectHasType(ety) => {
236 let ety = fcx.shallow_resolve(ety);
237 if !ety.is_ty_var() {
243 ExpectRvalueLikeUnsized(ety) => {
244 ExpectRvalueLikeUnsized(ety)
250 /// Provide an expectation for an rvalue expression given an *optional*
251 /// hint, which is not required for type safety (the resulting type might
252 /// be checked higher up, as is the case with `&expr` and `box expr`), but
253 /// is useful in determining the concrete type.
255 /// The primary use case is where the expected type is a fat pointer,
256 /// like `&[isize]`. For example, consider the following statement:
258 /// let x: &[isize] = &[1, 2, 3];
260 /// In this case, the expected type for the `&[1, 2, 3]` expression is
261 /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
262 /// expectation `ExpectHasType([isize])`, that would be too strong --
263 /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
264 /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
265 /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
266 /// which still is useful, because it informs integer literals and the like.
267 /// See the test case `test/run-pass/coerce-expect-unsized.rs` and #20169
268 /// for examples of where this comes up,.
269 fn rvalue_hint(fcx: &FnCtxt<'a, 'gcx, 'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> {
270 match fcx.tcx.struct_tail(ty).sty {
271 ty::TySlice(_) | ty::TyStr | ty::TyTrait(..) => {
272 ExpectRvalueLikeUnsized(ty)
274 _ => ExpectHasType(ty)
278 // Resolves `expected` by a single level if it is a variable. If
279 // there is no expected type or resolution is not possible (e.g.,
280 // no constraints yet present), just returns `None`.
281 fn resolve(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Expectation<'tcx> {
286 ExpectCastableToType(t) => {
287 ExpectCastableToType(fcx.resolve_type_vars_if_possible(&t))
289 ExpectHasType(t) => {
290 ExpectHasType(fcx.resolve_type_vars_if_possible(&t))
292 ExpectRvalueLikeUnsized(t) => {
293 ExpectRvalueLikeUnsized(fcx.resolve_type_vars_if_possible(&t))
298 fn to_option(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
299 match self.resolve(fcx) {
300 NoExpectation => None,
301 ExpectCastableToType(ty) |
303 ExpectRvalueLikeUnsized(ty) => Some(ty),
307 fn only_has_type(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
308 match self.resolve(fcx) {
309 ExpectHasType(ty) => Some(ty),
315 #[derive(Copy, Clone)]
316 pub struct UnsafetyState {
317 pub def: ast::NodeId,
318 pub unsafety: hir::Unsafety,
319 pub unsafe_push_count: u32,
324 pub fn function(unsafety: hir::Unsafety, def: ast::NodeId) -> UnsafetyState {
325 UnsafetyState { def: def, unsafety: unsafety, unsafe_push_count: 0, from_fn: true }
328 pub fn recurse(&mut self, blk: &hir::Block) -> UnsafetyState {
329 match self.unsafety {
330 // If this unsafe, then if the outer function was already marked as
331 // unsafe we shouldn't attribute the unsafe'ness to the block. This
332 // way the block can be warned about instead of ignoring this
333 // extraneous block (functions are never warned about).
334 hir::Unsafety::Unsafe if self.from_fn => *self,
337 let (unsafety, def, count) = match blk.rules {
338 hir::PushUnsafeBlock(..) =>
339 (unsafety, blk.id, self.unsafe_push_count.checked_add(1).unwrap()),
340 hir::PopUnsafeBlock(..) =>
341 (unsafety, blk.id, self.unsafe_push_count.checked_sub(1).unwrap()),
342 hir::UnsafeBlock(..) =>
343 (hir::Unsafety::Unsafe, blk.id, self.unsafe_push_count),
344 hir::DefaultBlock | hir::PushUnstableBlock | hir:: PopUnstableBlock =>
345 (unsafety, self.def, self.unsafe_push_count),
347 UnsafetyState{ def: def,
349 unsafe_push_count: count,
357 pub struct FnCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
358 ast_ty_to_ty_cache: RefCell<NodeMap<Ty<'tcx>>>,
360 body_id: ast::NodeId,
362 // This flag is set to true if, during the writeback phase, we encounter
363 // a type error in this function.
364 writeback_errors: Cell<bool>,
366 // Number of errors that had been reported when we started
367 // checking this function. On exit, if we find that *more* errors
368 // have been reported, we will skip regionck and other work that
369 // expects the types within the function to be consistent.
370 err_count_on_creation: usize,
374 ps: RefCell<UnsafetyState>,
376 inh: &'a Inherited<'a, 'gcx, 'tcx>,
379 impl<'a, 'gcx, 'tcx> Deref for FnCtxt<'a, 'gcx, 'tcx> {
380 type Target = Inherited<'a, 'gcx, 'tcx>;
381 fn deref(&self) -> &Self::Target {
386 /// Helper type of a temporary returned by ccx.inherited(...).
387 /// Necessary because we can't write the following bound:
388 /// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(Inherited<'b, 'gcx, 'tcx>).
389 pub struct InheritedBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
390 ccx: &'a CrateCtxt<'a, 'gcx>,
391 infcx: infer::InferCtxtBuilder<'a, 'gcx, 'tcx>
394 impl<'a, 'gcx, 'tcx> CrateCtxt<'a, 'gcx> {
395 pub fn inherited(&'a self, id: ast::NodeId)
396 -> InheritedBuilder<'a, 'gcx, 'tcx> {
397 let param_env = ParameterEnvironment::for_item(self.tcx, id);
400 infcx: self.tcx.infer_ctxt(Some(ty::Tables::empty()),
402 Reveal::NotSpecializable)
407 impl<'a, 'gcx, 'tcx> InheritedBuilder<'a, 'gcx, 'tcx> {
408 fn enter<F, R>(&'tcx mut self, f: F) -> R
409 where F: for<'b> FnOnce(Inherited<'b, 'gcx, 'tcx>) -> R
412 self.infcx.enter(|infcx| {
416 fulfillment_cx: RefCell::new(traits::FulfillmentContext::new()),
417 locals: RefCell::new(NodeMap()),
418 deferred_call_resolutions: RefCell::new(DefIdMap()),
419 deferred_cast_checks: RefCell::new(Vec::new()),
420 anon_types: RefCell::new(DefIdMap()),
421 deferred_obligations: RefCell::new(Vec::new()),
427 impl<'a, 'gcx, 'tcx> Inherited<'a, 'gcx, 'tcx> {
428 fn normalize_associated_types_in<T>(&self,
430 body_id: ast::NodeId,
433 where T : TypeFoldable<'tcx>
435 assoc::normalize_associated_types_in(self,
436 &mut self.fulfillment_cx.borrow_mut(),
444 struct CheckItemTypesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
445 struct CheckItemBodiesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
447 impl<'a, 'tcx> Visitor<'tcx> for CheckItemTypesVisitor<'a, 'tcx> {
448 fn visit_item(&mut self, i: &'tcx hir::Item) {
449 check_item_type(self.ccx, i);
450 intravisit::walk_item(self, i);
453 fn visit_ty(&mut self, t: &'tcx hir::Ty) {
455 hir::TyFixedLengthVec(_, ref expr) => {
456 check_const_with_type(self.ccx, &expr, self.ccx.tcx.types.usize, expr.id);
461 intravisit::walk_ty(self, t);
465 impl<'a, 'tcx> Visitor<'tcx> for CheckItemBodiesVisitor<'a, 'tcx> {
466 fn visit_item(&mut self, i: &'tcx hir::Item) {
467 check_item_body(self.ccx, i);
471 pub fn check_wf_new(ccx: &CrateCtxt) -> CompileResult {
472 ccx.tcx.sess.track_errors(|| {
473 let mut visit = wfcheck::CheckTypeWellFormedVisitor::new(ccx);
474 ccx.tcx.visit_all_items_in_krate(DepNode::WfCheck, &mut visit);
478 pub fn check_item_types(ccx: &CrateCtxt) -> CompileResult {
479 ccx.tcx.sess.track_errors(|| {
480 let mut visit = CheckItemTypesVisitor { ccx: ccx };
481 ccx.tcx.visit_all_items_in_krate(DepNode::TypeckItemType, &mut visit);
485 pub fn check_item_bodies(ccx: &CrateCtxt) -> CompileResult {
486 ccx.tcx.sess.track_errors(|| {
487 let mut visit = CheckItemBodiesVisitor { ccx: ccx };
488 ccx.tcx.visit_all_items_in_krate(DepNode::TypeckItemBody, &mut visit);
490 // Process deferred obligations, now that all functions
491 // bodies have been fully inferred.
492 for (&item_id, obligations) in ccx.deferred_obligations.borrow().iter() {
493 // Use the same DepNode as for the body of the original function/item.
494 let def_id = ccx.tcx.map.local_def_id(item_id);
495 let _task = ccx.tcx.dep_graph.in_task(DepNode::TypeckItemBody(def_id));
497 let param_env = ParameterEnvironment::for_item(ccx.tcx, item_id);
498 ccx.tcx.infer_ctxt(None, Some(param_env),
499 Reveal::NotSpecializable).enter(|infcx| {
500 let mut fulfillment_cx = traits::FulfillmentContext::new();
501 for obligation in obligations.iter().map(|o| o.to_obligation()) {
502 fulfillment_cx.register_predicate_obligation(&infcx, obligation);
505 if let Err(errors) = fulfillment_cx.select_all_or_error(&infcx) {
506 infcx.report_fulfillment_errors(&errors);
513 pub fn check_drop_impls(ccx: &CrateCtxt) -> CompileResult {
514 ccx.tcx.sess.track_errors(|| {
515 let _task = ccx.tcx.dep_graph.in_task(DepNode::Dropck);
516 let drop_trait = match ccx.tcx.lang_items.drop_trait() {
517 Some(id) => ccx.tcx.lookup_trait_def(id), None => { return }
519 drop_trait.for_each_impl(ccx.tcx, |drop_impl_did| {
520 let _task = ccx.tcx.dep_graph.in_task(DepNode::DropckImpl(drop_impl_did));
521 if drop_impl_did.is_local() {
522 match dropck::check_drop_impl(ccx, drop_impl_did) {
525 assert!(ccx.tcx.sess.has_errors());
533 fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
534 decl: &'tcx hir::FnDecl,
535 body: &'tcx hir::Block,
536 fn_id: ast::NodeId) {
537 let raw_fty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(fn_id)).ty;
538 let fn_ty = match raw_fty.sty {
539 ty::TyFnDef(_, _, f) => f,
540 _ => span_bug!(body.span, "check_bare_fn: function type expected")
543 ccx.inherited(fn_id).enter(|inh| {
544 // Compute the fty from point of view of inside fn.
545 let fn_scope = inh.tcx.region_maps.call_site_extent(fn_id, body.id);
547 fn_ty.sig.subst(inh.tcx, &inh.parameter_environment.free_substs);
549 inh.tcx.liberate_late_bound_regions(fn_scope, &fn_sig);
551 inh.normalize_associated_types_in(body.span, body.id, &fn_sig);
553 let fcx = check_fn(&inh, fn_ty.unsafety, fn_id, &fn_sig, decl, fn_id, body);
555 fcx.select_all_obligations_and_apply_defaults();
556 fcx.closure_analyze_fn(body);
557 fcx.select_obligations_where_possible();
559 fcx.select_all_obligations_or_error(); // Casts can introduce new obligations.
561 fcx.regionck_fn(fn_id, decl, body);
562 fcx.resolve_type_vars_in_fn(decl, body, fn_id);
566 struct GatherLocalsVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
567 fcx: &'a FnCtxt<'a, 'gcx, 'tcx>
570 impl<'a, 'gcx, 'tcx> GatherLocalsVisitor<'a, 'gcx, 'tcx> {
571 fn assign(&mut self, _span: Span, nid: ast::NodeId, ty_opt: Option<Ty<'tcx>>) -> Ty<'tcx> {
574 // infer the variable's type
575 let var_ty = self.fcx.next_ty_var();
576 self.fcx.locals.borrow_mut().insert(nid, var_ty);
580 // take type that the user specified
581 self.fcx.locals.borrow_mut().insert(nid, typ);
588 impl<'a, 'gcx, 'tcx> Visitor<'gcx> for GatherLocalsVisitor<'a, 'gcx, 'tcx> {
589 // Add explicitly-declared locals.
590 fn visit_local(&mut self, local: &'gcx hir::Local) {
591 let o_ty = match local.ty {
592 Some(ref ty) => Some(self.fcx.to_ty(&ty)),
595 self.assign(local.span, local.id, o_ty);
596 debug!("Local variable {:?} is assigned type {}",
598 self.fcx.ty_to_string(
599 self.fcx.locals.borrow().get(&local.id).unwrap().clone()));
600 intravisit::walk_local(self, local);
603 // Add pattern bindings.
604 fn visit_pat(&mut self, p: &'gcx hir::Pat) {
605 if let PatKind::Binding(_, ref path1, _) = p.node {
606 let var_ty = self.assign(p.span, p.id, None);
608 self.fcx.require_type_is_sized(var_ty, p.span,
609 traits::VariableType(p.id));
611 debug!("Pattern binding {} is assigned to {} with type {:?}",
613 self.fcx.ty_to_string(
614 self.fcx.locals.borrow().get(&p.id).unwrap().clone()),
617 intravisit::walk_pat(self, p);
620 fn visit_block(&mut self, b: &'gcx hir::Block) {
621 // non-obvious: the `blk` variable maps to region lb, so
622 // we have to keep this up-to-date. This
623 // is... unfortunate. It'd be nice to not need this.
624 intravisit::walk_block(self, b);
627 // Since an expr occurs as part of the type fixed size arrays we
628 // need to record the type for that node
629 fn visit_ty(&mut self, t: &'gcx hir::Ty) {
631 hir::TyFixedLengthVec(ref ty, ref count_expr) => {
633 self.fcx.check_expr_with_hint(&count_expr, self.fcx.tcx.types.usize);
635 hir::TyBareFn(ref function_declaration) => {
636 intravisit::walk_fn_decl_nopat(self, &function_declaration.decl);
637 walk_list!(self, visit_lifetime_def, &function_declaration.lifetimes);
639 _ => intravisit::walk_ty(self, t)
643 // Don't descend into the bodies of nested closures
644 fn visit_fn(&mut self, _: intravisit::FnKind<'gcx>, _: &'gcx hir::FnDecl,
645 _: &'gcx hir::Block, _: Span, _: ast::NodeId) { }
648 /// Helper used by check_bare_fn and check_expr_fn. Does the grungy work of checking a function
649 /// body and returns the function context used for that purpose, since in the case of a fn item
650 /// there is still a bit more to do.
653 /// * inherited: other fields inherited from the enclosing fn (if any)
654 fn check_fn<'a, 'gcx, 'tcx>(inherited: &'a Inherited<'a, 'gcx, 'tcx>,
655 unsafety: hir::Unsafety,
656 unsafety_id: ast::NodeId,
657 fn_sig: &ty::FnSig<'tcx>,
658 decl: &'gcx hir::FnDecl,
660 body: &'gcx hir::Block)
661 -> FnCtxt<'a, 'gcx, 'tcx>
663 let mut fn_sig = fn_sig.clone();
665 debug!("check_fn(sig={:?}, fn_id={})", fn_sig, fn_id);
667 // Create the function context. This is either derived from scratch or,
668 // in the case of function expressions, based on the outer context.
669 let mut fcx = FnCtxt::new(inherited, fn_sig.output, body.id);
670 *fcx.ps.borrow_mut() = UnsafetyState::function(unsafety, unsafety_id);
672 fcx.require_type_is_sized(fcx.ret_ty, decl.output.span(), traits::ReturnType);
673 fcx.ret_ty = fcx.instantiate_anon_types(&fcx.ret_ty);
674 fn_sig.output = fcx.ret_ty;
677 let mut visit = GatherLocalsVisitor { fcx: &fcx, };
679 // Add formal parameters.
680 for (arg_ty, input) in fn_sig.inputs.iter().zip(&decl.inputs) {
681 // The type of the argument must be well-formed.
683 // NB -- this is now checked in wfcheck, but that
684 // currently only results in warnings, so we issue an
685 // old-style WF obligation here so that we still get the
686 // errors that we used to get.
687 fcx.register_old_wf_obligation(arg_ty, input.ty.span, traits::MiscObligation);
689 // Create type variables for each argument.
690 pat_util::pat_bindings(&input.pat, |_bm, pat_id, sp, _path| {
691 let var_ty = visit.assign(sp, pat_id, None);
692 fcx.require_type_is_sized(var_ty, sp, traits::VariableType(pat_id));
695 // Check the pattern.
696 fcx.check_pat(&input.pat, arg_ty);
697 fcx.write_ty(input.id, arg_ty);
700 visit.visit_block(body);
703 inherited.tables.borrow_mut().liberated_fn_sigs.insert(fn_id, fn_sig);
705 // FIXME(aburka) do we need this special case? and should it be is_uninhabited?
706 let expected = if fcx.ret_ty.is_never() {
709 ExpectHasType(fcx.ret_ty)
711 fcx.check_block_with_expected(body, expected);
716 pub fn check_struct(ccx: &CrateCtxt, id: ast::NodeId, span: Span) {
719 check_representable(tcx, span, id, "struct");
721 if tcx.lookup_simd(ccx.tcx.map.local_def_id(id)) {
722 check_simd(tcx, span, id);
726 pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
727 debug!("check_item_type(it.id={}, it.name={})",
729 ccx.tcx.item_path_str(ccx.tcx.map.local_def_id(it.id)));
730 let _indenter = indenter();
732 // Consts can play a role in type-checking, so they are included here.
733 hir::ItemStatic(_, _, ref e) |
734 hir::ItemConst(_, ref e) => check_const(ccx, &e, it.id),
735 hir::ItemEnum(ref enum_definition, _) => {
736 check_enum_variants(ccx,
738 &enum_definition.variants,
741 hir::ItemFn(..) => {} // entirely within check_item_body
742 hir::ItemImpl(_, _, _, _, _, ref impl_items) => {
743 debug!("ItemImpl {} with id {}", it.name, it.id);
744 let impl_def_id = ccx.tcx.map.local_def_id(it.id);
745 match ccx.tcx.impl_trait_ref(impl_def_id) {
746 Some(impl_trait_ref) => {
747 check_impl_items_against_trait(ccx,
752 let trait_def_id = impl_trait_ref.def_id;
753 check_on_unimplemented(ccx, trait_def_id, it);
758 hir::ItemTrait(..) => {
759 let def_id = ccx.tcx.map.local_def_id(it.id);
760 check_on_unimplemented(ccx, def_id, it);
762 hir::ItemStruct(..) => {
763 check_struct(ccx, it.id, it.span);
765 hir::ItemTy(_, ref generics) => {
766 let pty_ty = ccx.tcx.node_id_to_type(it.id);
767 check_bounds_are_used(ccx, generics, pty_ty);
769 hir::ItemForeignMod(ref m) => {
770 if m.abi == Abi::RustIntrinsic {
771 for item in &m.items {
772 intrinsic::check_intrinsic_type(ccx, item);
774 } else if m.abi == Abi::PlatformIntrinsic {
775 for item in &m.items {
776 intrinsic::check_platform_intrinsic_type(ccx, item);
779 for item in &m.items {
780 let pty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(item.id));
781 if !pty.generics.types.is_empty() {
782 let mut err = struct_span_err!(ccx.tcx.sess, item.span, E0044,
783 "foreign items may not have type parameters");
784 span_help!(&mut err, item.span,
785 "consider using specialization instead of \
790 if let hir::ForeignItemFn(ref fn_decl, _) = item.node {
791 require_c_abi_if_variadic(ccx.tcx, fn_decl, m.abi, item.span);
796 _ => {/* nothing to do */ }
800 pub fn check_item_body<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
801 debug!("check_item_body(it.id={}, it.name={})",
803 ccx.tcx.item_path_str(ccx.tcx.map.local_def_id(it.id)));
804 let _indenter = indenter();
806 hir::ItemFn(ref decl, _, _, _, _, ref body) => {
807 check_bare_fn(ccx, &decl, &body, it.id);
809 hir::ItemImpl(_, _, _, _, _, ref impl_items) => {
810 debug!("ItemImpl {} with id {}", it.name, it.id);
812 for impl_item in impl_items {
813 match impl_item.node {
814 hir::ImplItemKind::Const(_, ref expr) => {
815 check_const(ccx, &expr, impl_item.id)
817 hir::ImplItemKind::Method(ref sig, ref body) => {
818 check_bare_fn(ccx, &sig.decl, body, impl_item.id);
820 hir::ImplItemKind::Type(_) => {
821 // Nothing to do here.
826 hir::ItemTrait(_, _, _, ref trait_items) => {
827 for trait_item in trait_items {
828 match trait_item.node {
829 hir::ConstTraitItem(_, Some(ref expr)) => {
830 check_const(ccx, &expr, trait_item.id)
832 hir::MethodTraitItem(ref sig, Some(ref body)) => {
833 check_bare_fn(ccx, &sig.decl, body, trait_item.id);
835 hir::MethodTraitItem(_, None) |
836 hir::ConstTraitItem(_, None) |
837 hir::TypeTraitItem(..) => {
843 _ => {/* nothing to do */ }
847 fn check_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
850 let generics = ccx.tcx.lookup_generics(def_id);
851 if let Some(ref attr) = item.attrs.iter().find(|a| {
852 a.check_name("rustc_on_unimplemented")
854 if let Some(ref istring) = attr.value_str() {
855 let parser = Parser::new(&istring);
856 let types = &generics.types;
857 for token in parser {
859 Piece::String(_) => (), // Normal string, no need to check it
860 Piece::NextArgument(a) => match a.position {
861 // `{Self}` is allowed
862 Position::ArgumentNamed(s) if s == "Self" => (),
863 // So is `{A}` if A is a type parameter
864 Position::ArgumentNamed(s) => match types.iter().find(|t| {
869 let name = ccx.tcx.item_name(def_id);
870 span_err!(ccx.tcx.sess, attr.span, E0230,
871 "there is no type parameter \
876 // `{:1}` and `{}` are not to be used
877 Position::ArgumentIs(_) => {
878 span_err!(ccx.tcx.sess, attr.span, E0231,
879 "only named substitution \
880 parameters are allowed");
887 ccx.tcx.sess, attr.span, E0232,
888 "this attribute must have a value")
889 .span_label(attr.span, &format!("attribute requires a value"))
890 .note(&format!("eg `#[rustc_on_unimplemented = \"foo\"]`"))
896 fn report_forbidden_specialization<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
897 impl_item: &hir::ImplItem,
900 let mut err = struct_span_err!(
901 tcx.sess, impl_item.span, E0520,
902 "`{}` specializes an item from a parent `impl`, but \
903 neither that item nor the `impl` are marked `default`",
905 err.span_label(impl_item.span, &format!("cannot specialize default item `{}`",
908 match tcx.span_of_impl(parent_impl) {
910 err.span_label(span, &"parent `impl` is here");
911 err.note(&format!("to specialize, either the parent `impl` or `{}` \
912 in the parent `impl` must be marked `default`",
916 err.note(&format!("parent implementation is in crate `{}`", cname));
923 fn check_specialization_validity<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
924 trait_def: &ty::TraitDef<'tcx>,
926 impl_item: &hir::ImplItem)
928 let ancestors = trait_def.ancestors(impl_id);
930 let parent = match impl_item.node {
931 hir::ImplItemKind::Const(..) => {
932 ancestors.const_defs(tcx, impl_item.name).skip(1).next()
933 .map(|node_item| node_item.map(|parent| parent.defaultness))
935 hir::ImplItemKind::Method(..) => {
936 ancestors.fn_defs(tcx, impl_item.name).skip(1).next()
937 .map(|node_item| node_item.map(|parent| parent.defaultness))
940 hir::ImplItemKind::Type(_) => {
941 ancestors.type_defs(tcx, impl_item.name).skip(1).next()
942 .map(|node_item| node_item.map(|parent| parent.defaultness))
946 if let Some(parent) = parent {
947 if parent.item.is_final() {
948 report_forbidden_specialization(tcx, impl_item, parent.node.def_id());
954 fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
957 impl_trait_ref: &ty::TraitRef<'tcx>,
958 impl_items: &[hir::ImplItem]) {
959 // If the trait reference itself is erroneous (so the compilation is going
960 // to fail), skip checking the items here -- the `impl_item` table in `tcx`
961 // isn't populated for such impls.
962 if impl_trait_ref.references_error() { return; }
964 // Locate trait definition and items
966 let trait_def = tcx.lookup_trait_def(impl_trait_ref.def_id);
967 let trait_items = tcx.trait_items(impl_trait_ref.def_id);
968 let mut overridden_associated_type = None;
970 // Check existing impl methods to see if they are both present in trait
971 // and compatible with trait signature
972 for impl_item in impl_items {
973 let ty_impl_item = tcx.impl_or_trait_item(tcx.map.local_def_id(impl_item.id));
974 let ty_trait_item = trait_items.iter()
975 .find(|ac| ac.name() == ty_impl_item.name());
977 // Check that impl definition matches trait definition
978 if let Some(ty_trait_item) = ty_trait_item {
979 match impl_item.node {
980 hir::ImplItemKind::Const(..) => {
981 let impl_const = match ty_impl_item {
982 ty::ConstTraitItem(ref cti) => cti,
983 _ => span_bug!(impl_item.span, "non-const impl-item for const")
986 // Find associated const definition.
987 if let &ty::ConstTraitItem(ref trait_const) = ty_trait_item {
988 compare_const_impl(ccx,
994 let mut err = struct_span_err!(tcx.sess, impl_item.span, E0323,
995 "item `{}` is an associated const, \
996 which doesn't match its trait `{:?}`",
999 err.span_label(impl_item.span, &format!("does not match trait"));
1000 // We can only get the spans from local trait definition
1001 // Same for E0324 and E0325
1002 if let Some(trait_span) = tcx.map.span_if_local(ty_trait_item.def_id()) {
1003 err.span_label(trait_span, &format!("item in trait"));
1008 hir::ImplItemKind::Method(_, ref body) => {
1009 let impl_method = match ty_impl_item {
1010 ty::MethodTraitItem(ref mti) => mti,
1011 _ => span_bug!(impl_item.span, "non-method impl-item for method")
1014 if let &ty::MethodTraitItem(ref trait_method) = ty_trait_item {
1015 compare_impl_method(ccx,
1022 let mut err = struct_span_err!(tcx.sess, impl_item.span, E0324,
1023 "item `{}` is an associated method, \
1024 which doesn't match its trait `{:?}`",
1027 err.span_label(impl_item.span, &format!("does not match trait"));
1028 if let Some(trait_span) = tcx.map.span_if_local(ty_trait_item.def_id()) {
1029 err.span_label(trait_span, &format!("item in trait"));
1034 hir::ImplItemKind::Type(_) => {
1035 let impl_type = match ty_impl_item {
1036 ty::TypeTraitItem(ref tti) => tti,
1037 _ => span_bug!(impl_item.span, "non-type impl-item for type")
1040 if let &ty::TypeTraitItem(ref at) = ty_trait_item {
1041 if let Some(_) = at.ty {
1042 overridden_associated_type = Some(impl_item);
1045 let mut err = struct_span_err!(tcx.sess, impl_item.span, E0325,
1046 "item `{}` is an associated type, \
1047 which doesn't match its trait `{:?}`",
1050 err.span_label(impl_item.span, &format!("does not match trait"));
1051 if let Some(trait_span) = tcx.map.span_if_local(ty_trait_item.def_id()) {
1052 err.span_label(trait_span, &format!("item in trait"));
1060 check_specialization_validity(tcx, trait_def, impl_id, impl_item);
1063 // Check for missing items from trait
1064 let provided_methods = tcx.provided_trait_methods(impl_trait_ref.def_id);
1065 let mut missing_items = Vec::new();
1066 let mut invalidated_items = Vec::new();
1067 let associated_type_overridden = overridden_associated_type.is_some();
1068 for trait_item in trait_items.iter() {
1073 ty::ConstTraitItem(ref associated_const) => {
1074 is_provided = associated_const.has_value;
1075 is_implemented = impl_items.iter().any(|ii| {
1077 hir::ImplItemKind::Const(..) => {
1078 ii.name == associated_const.name
1084 ty::MethodTraitItem(ref trait_method) => {
1085 is_provided = provided_methods.iter().any(|m| m.name == trait_method.name);
1086 is_implemented = trait_def.ancestors(impl_id)
1087 .fn_defs(tcx, trait_method.name)
1089 .map(|node_item| !node_item.node.is_from_trait())
1092 ty::TypeTraitItem(ref trait_assoc_ty) => {
1093 is_provided = trait_assoc_ty.ty.is_some();
1094 is_implemented = trait_def.ancestors(impl_id)
1095 .type_defs(tcx, trait_assoc_ty.name)
1097 .map(|node_item| !node_item.node.is_from_trait())
1102 if !is_implemented {
1104 missing_items.push(trait_item.name());
1105 } else if associated_type_overridden {
1106 invalidated_items.push(trait_item.name());
1111 if !missing_items.is_empty() {
1112 struct_span_err!(tcx.sess, impl_span, E0046,
1113 "not all trait items implemented, missing: `{}`",
1114 missing_items.iter()
1115 .map(|name| name.to_string())
1116 .collect::<Vec<_>>().join("`, `"))
1117 .span_label(impl_span, &format!("missing `{}` in implementation",
1118 missing_items.iter()
1119 .map(|name| name.to_string())
1120 .collect::<Vec<_>>().join("`, `"))
1124 if !invalidated_items.is_empty() {
1125 let invalidator = overridden_associated_type.unwrap();
1126 span_err!(tcx.sess, invalidator.span, E0399,
1127 "the following trait items need to be reimplemented \
1128 as `{}` was overridden: `{}`",
1130 invalidated_items.iter()
1131 .map(|name| name.to_string())
1132 .collect::<Vec<_>>().join("`, `"))
1136 /// Checks a constant with a given type.
// NOTE(review): this excerpt elides some original lines (gaps in the embedded
// numbering) — the trailing `id: ast::NodeId` parameter and closing braces of
// both functions below are not shown here.
1137 fn check_const_with_type<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
1138 expr: &'tcx hir::Expr,
1139 expected_type: Ty<'tcx>,
// Type-check `expr` in a fresh FnCtxt rooted at the expression itself,
// coercing it to `expected_type` and then running the usual follow-up
// passes (obligation selection, regionck, writeback).
1141 ccx.inherited(id).enter(|inh| {
1142 let fcx = FnCtxt::new(&inh, expected_type, expr.id);
// Constants must have a statically known size.
1143 fcx.require_type_is_sized(expected_type, expr.span, traits::ConstSized);
1145 // Gather locals in statics (because of block expressions).
1146 // This is technically unnecessary because locals in static items are forbidden,
1147 // but prevents type checking from blowing up before const checking can properly
// (continuation of the above comment is elided in this excerpt)
1149 GatherLocalsVisitor { fcx: &fcx }.visit_expr(expr);
1151 fcx.check_expr_coercable_to_type(expr, expected_type);
// Finalization: apply numeric/diverging fallbacks, analyze closures,
// then force all remaining obligations to resolve or error.
1153 fcx.select_all_obligations_and_apply_defaults();
1154 fcx.closure_analyze_const(expr);
1155 fcx.select_obligations_where_possible();
1157 fcx.select_all_obligations_or_error();
1159 fcx.regionck_expr(expr);
1160 fcx.resolve_type_vars_in_expr(expr, id);
// Checks a const item: looks up its declared type and delegates to
// `check_const_with_type`. (The `id` parameter line is elided here.)
1164 fn check_const<'a, 'tcx>(ccx: &CrateCtxt<'a,'tcx>,
1165 expr: &'tcx hir::Expr,
1167 let decl_ty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(id)).ty;
1168 check_const_with_type(ccx, expr, decl_ty, id);
1171 /// Checks whether a type can be represented in memory. In particular, it
1172 /// identifies types that contain themselves without indirection through a
1173 /// pointer, which would mean their size is unbounded.
// NOTE(review): some lines are elided from this excerpt (e.g. the `sp: Span`
// parameter and the function's `return`/closing lines).
1174 pub fn check_representable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
1176 item_id: ast::NodeId,
1177 _designation: &str) -> bool {
1178 let rty = tcx.node_id_to_type(item_id);
1180 // Check that it is possible to represent this type. This call identifies
1181 // (1) types that contain themselves and (2) types that contain a different
1182 // recursive type. It is only necessary to throw an error on those that
1183 // contain themselves. For case 2, there must be an inner type that will be
1184 // caught by case 1.
1185 match rty.is_representable(tcx, sp) {
1186 Representability::SelfRecursive => {
// Emit the "recursive type has infinite size" diagnostic for the item.
1187 let item_def_id = tcx.map.local_def_id(item_id);
1188 tcx.recursive_type_with_infinite_size_error(item_def_id).emit();
// `ContainsRecursive` is deliberately not an error here (see comment above).
1191 Representability::Representable | Representability::ContainsRecursive => (),
// Validates a `#[repr(simd)]` struct: it must be non-empty (E0075),
// homogeneous (E0076), and its element type must be a machine type or a
// type parameter (E0077).
// NOTE(review): the surrounding `match t.sty { ... }` arms and several
// closing/`return` lines are elided from this excerpt.
1196 pub fn check_simd<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span, id: ast::NodeId) {
1197 let t = tcx.node_id_to_type(id);
1199 ty::TyStruct(def, substs) => {
1200 let fields = &def.struct_variant().fields;
1201 if fields.is_empty() {
1202 span_err!(tcx.sess, sp, E0075, "SIMD vector cannot be empty");
// All fields must have the same type as the first field.
1205 let e = fields[0].ty(tcx, substs);
1206 if !fields.iter().all(|f| f.ty(tcx, substs) == e) {
1207 struct_span_err!(tcx.sess, sp, E0076, "SIMD vector should be homogeneous")
1208 .span_label(sp, &format!("SIMD elements must have the same type"))
// Element-type check (on `e`): type parameters and machine types are OK.
1213 ty::TyParam(_) => { /* struct<T>(T, T, T, T) is ok */ }
1214 _ if e.is_machine() => { /* struct(u8, u8, u8, u8) is ok */ }
1216 span_err!(tcx.sess, sp, E0077,
1217 "SIMD vector element type should be machine type");
// Checks an enum's variants: validates the repr hint (E0084 for a repr on a
// zero-variant enum), type-checks each explicit discriminant expression
// against the enum's repr type, rejects duplicate discriminant values
// (E0081), and finally checks representability.
// NOTE(review): this excerpt elides some lines (e.g. the `sp`/`id` parameter
// lines, a loop header over `vs`, match arms, and closing braces).
1226 #[allow(trivial_numeric_casts)]
1227 pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
1229 vs: &'tcx [hir::Variant],
1231 let def_id = ccx.tcx.map.local_def_id(id);
1232 let hint = *ccx.tcx.lookup_repr_hints(def_id).get(0).unwrap_or(&attr::ReprAny);
1234 if hint != attr::ReprAny && vs.is_empty() {
1236 ccx.tcx.sess, sp, E0084,
1237 "unsupported representation for zero-variant enum")
1238 .span_label(sp, &format!("unsupported enum representation"))
// Each explicit discriminant expression is a constant of the repr type.
1242 let repr_type_ty = ccx.tcx.enum_repr_type(Some(&hint)).to_ty(ccx.tcx);
1244 if let Some(ref e) = v.node.disr_expr {
1245 check_const_with_type(ccx, e, repr_type_ty, e.id);
1249 let def_id = ccx.tcx.map.local_def_id(id);
1251 let variants = &ccx.tcx.lookup_adt_def(def_id).variants;
1252 let mut disr_vals: Vec<ty::Disr> = Vec::new();
1253 for (v, variant) in vs.iter().zip(variants.iter()) {
1254 let current_disr_val = variant.disr_val;
1256 // Check for duplicate discriminant values
1257 if let Some(i) = disr_vals.iter().position(|&x| x == current_disr_val) {
// Point at the first use of the discriminant as well as the duplicate.
// If a variant has no explicit disr_expr, fall back to the variant's span.
1258 let variant_i_node_id = ccx.tcx.map.as_local_node_id(variants[i].did).unwrap();
1259 let variant_i = ccx.tcx.map.expect_variant(variant_i_node_id);
1260 let i_span = match variant_i.node.disr_expr {
1261 Some(ref expr) => expr.span,
1262 None => ccx.tcx.map.span(variant_i_node_id)
1264 let span = match v.node.disr_expr {
1265 Some(ref expr) => expr.span,
1268 struct_span_err!(ccx.tcx.sess, span, E0081,
1269 "discriminant value `{}` already exists", disr_vals[i])
1270 .span_label(i_span, &format!("first use of `{}`", disr_vals[i]))
1271 .span_label(span , &format!("enum already has `{}`", disr_vals[i]))
1274 disr_vals.push(current_disr_val);
1277 check_representable(ccx.tcx, sp, id, "enum");
// AstConv implementation for FnCtxt: during body type-checking, AST-to-type
// conversion is backed by the tcx lookup tables and, unlike the collect-phase
// implementation, may create fresh inference variables (`ty_infer`,
// `ty_infer_for_def`) and normalize projections through the inference context.
// NOTE(review): this excerpt elides some lines in several methods (match
// arms, closing braces, some parameters) — gaps in the embedded numbering.
1280 impl<'a, 'gcx, 'tcx> AstConv<'gcx, 'tcx> for FnCtxt<'a, 'gcx, 'tcx> {
1281 fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
1283 fn ast_ty_to_ty_cache(&self) -> &RefCell<NodeMap<Ty<'tcx>>> {
1284 &self.ast_ty_to_ty_cache
// These lookups cannot fail inside a body, hence the unconditional `Ok`.
1287 fn get_generics(&self, _: Span, id: DefId)
1288 -> Result<&'tcx ty::Generics<'tcx>, ErrorReported>
1290 Ok(self.tcx().lookup_generics(id))
1293 fn get_item_type_scheme(&self, _: Span, id: DefId)
1294 -> Result<ty::TypeScheme<'tcx>, ErrorReported>
1296 Ok(self.tcx().lookup_item_type(id))
1299 fn get_trait_def(&self, _: Span, id: DefId)
1300 -> Result<&'tcx ty::TraitDef<'tcx>, ErrorReported>
1302 Ok(self.tcx().lookup_trait_def(id))
1305 fn ensure_super_predicates(&self, _: Span, _: DefId) -> Result<(), ErrorReported> {
1306 // all super predicates are ensured during collect pass
1310 fn get_free_substs(&self) -> Option<&Substs<'tcx>> {
1311 Some(&self.parameter_environment.free_substs)
// Collects the trait bounds on a type parameter by filtering the caller's
// parameter environment for trait predicates whose self type is that param.
1314 fn get_type_parameter_bounds(&self,
1316 node_id: ast::NodeId)
1317 -> Result<Vec<ty::PolyTraitRef<'tcx>>, ErrorReported>
1319 let def = self.tcx.type_parameter_def(node_id);
1320 let r = self.parameter_environment
1323 .filter_map(|predicate| {
1325 ty::Predicate::Trait(ref data) => {
1326 if data.0.self_ty().is_param(def.index) {
1327 Some(data.to_poly_trait_ref())
1341 fn trait_defines_associated_type_named(&self,
1342 trait_def_id: DefId,
1343 assoc_name: ast::Name)
1346 let trait_def = self.tcx().lookup_trait_def(trait_def_id);
1347 trait_def.associated_type_names.contains(&assoc_name)
// Inside a body, `_` types become fresh inference variables.
// (The body of `ty_infer` is elided from this excerpt.)
1350 fn ty_infer(&self, _span: Span) -> Ty<'tcx> {
1354 fn ty_infer_for_def(&self,
1355 ty_param_def: &ty::TypeParameterDef<'tcx>,
1356 substs: &Substs<'tcx>,
1357 span: Span) -> Ty<'tcx> {
1358 self.type_var_for_def(span, ty_param_def, substs)
// Projections: replace late-bound regions with fresh variables, then
// normalize `<T as Trait>::Item` through the inference machinery.
1361 fn projected_ty_from_poly_trait_ref(&self,
1363 poly_trait_ref: ty::PolyTraitRef<'tcx>,
1364 item_name: ast::Name)
1367 let (trait_ref, _) =
1368 self.replace_late_bound_regions_with_fresh_var(
1370 infer::LateBoundRegionConversionTime::AssocTypeProjection(item_name),
1373 self.normalize_associated_type(span, trait_ref, item_name)
1376 fn projected_ty(&self,
1378 trait_ref: ty::TraitRef<'tcx>,
1379 item_name: ast::Name)
1382 self.normalize_associated_type(span, trait_ref, item_name)
1385 fn set_tainted_by_errors(&self) {
1386 self.infcx.set_tainted_by_errors()
// RegionScope implementation for FnCtxt: inside a function body, elided and
// default regions become fresh inference variables rather than fixed
// defaults — inference will find the correct region later.
// NOTE(review): closing braces of these methods are elided in this excerpt.
1390 impl<'a, 'gcx, 'tcx> RegionScope for FnCtxt<'a, 'gcx, 'tcx> {
1391 fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
1392 Some(self.base_object_lifetime_default(span))
1395 fn base_object_lifetime_default(&self, span: Span) -> ty::Region {
1396 // RFC #599 specifies that object lifetime defaults take
1397 // precedence over other defaults. But within a fn body we
1398 // don't have a *default* region, rather we use inference to
1399 // find the *correct* region, which is strictly more general
1400 // (and anyway, within a fn body the right region may not even
1401 // be something the user can write explicitly, since it might
1402 // be some expression).
1403 *self.next_region_var(infer::MiscVariable(span))
// Each anonymous (elided) region becomes its own fresh region variable.
1406 fn anon_regions(&self, span: Span, count: usize)
1407 -> Result<Vec<ty::Region>, Option<Vec<ElisionFailureInfo>>> {
1408 Ok((0..count).map(|_| {
1409 *self.next_region_var(infer::MiscVariable(span))
1414 /// Controls whether the arguments are tupled. This is used for the call
1417 /// Tupling means that all call-side arguments are packed into a tuple and
1418 /// passed as a single parameter. For example, if tupling is enabled, this
1421 /// fn f(x: (isize, isize))
1423 /// Can be called as:
// NOTE(review): the enum's variants are elided from this excerpt (gap in the
// embedded numbering between 1431 and 1436).
1430 #[derive(Clone, Eq, PartialEq)]
1431 enum TupleArgumentsFlag {
1436 impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
// Constructs a FnCtxt over an Inherited context. The struct-literal body is
// partially elided in this excerpt; the visible fields show a fresh
// ast-ty cache, writeback-error flag, the error count at creation time
// (used by `err_count_since_creation`), and an initial unsafety state.
1437 pub fn new(inh: &'a Inherited<'a, 'gcx, 'tcx>,
1439 body_id: ast::NodeId)
1440 -> FnCtxt<'a, 'gcx, 'tcx> {
1442 ast_ty_to_ty_cache: RefCell::new(NodeMap()),
1444 writeback_errors: Cell::new(false),
1445 err_count_on_creation: inh.tcx.sess.err_count(),
1447 ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal, 0)),
// Accessor for the parameter environment.
1452 pub fn param_env(&self) -> &ty::ParameterEnvironment<'tcx> {
1453 &self.parameter_environment
// Accessor for the session. (Its body line is elided from this excerpt.)
1456 pub fn sess(&self) -> &Session {
// Number of errors reported since this FnCtxt was created; used to decide
// whether missing node types can be attributed to earlier errors.
1460 pub fn err_count_since_creation(&self) -> usize {
1461 self.tcx.sess.err_count() - self.err_count_on_creation
1464 /// Resolves type variables in `ty` if possible. Unlike the infcx
1465 /// version (resolve_type_vars_if_possible), this version will
1466 /// also select obligations if it seems useful, in an effort
1467 /// to get more type information.
// NOTE(review): the early `return` lines after each has_infer_types check
// are elided from this excerpt.
1468 fn resolve_type_vars_with_obligations(&self, mut ty: Ty<'tcx>) -> Ty<'tcx> {
1469 debug!("resolve_type_vars_with_obligations(ty={:?})", ty);
1471 // No TyInfer()? Nothing needs doing.
1472 if !ty.has_infer_types() {
1473 debug!("resolve_type_vars_with_obligations: ty={:?}", ty);
1477 // If `ty` is a type variable, see whether we already know what it is.
1478 ty = self.resolve_type_vars_if_possible(&ty);
1479 if !ty.has_infer_types() {
1480 debug!("resolve_type_vars_with_obligations: ty={:?}", ty);
1484 // If not, try resolving pending obligations as much as
1485 // possible. This can help substantially when there are
1486 // indirect dependencies that don't seem worth tracking
1488 self.select_obligations_where_possible();
1489 ty = self.resolve_type_vars_if_possible(&ty);
1491 debug!("resolve_type_vars_with_obligations: ty={:?}", ty);
// Queues a deferred call-resolution handler for a closure; handlers are
// grouped per closure DefId.
1495 fn record_deferred_call_resolution(&self,
1496 closure_def_id: DefId,
1497 r: DeferredCallResolutionHandler<'gcx, 'tcx>) {
1498 let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
1499 deferred_call_resolutions.entry(closure_def_id).or_insert(vec![]).push(r);
// Takes (and removes) all deferred handlers for a closure; empty if none.
1502 fn remove_deferred_call_resolutions(&self,
1503 closure_def_id: DefId)
1504 -> Vec<DeferredCallResolutionHandler<'gcx, 'tcx>>
1506 let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
1507 deferred_call_resolutions.remove(&closure_def_id).unwrap_or(Vec::new())
// Debug tag identifying this FnCtxt by address (used in debug! output).
1510 pub fn tag(&self) -> String {
1511 let self_ptr: *const FnCtxt = self;
1512 format!("{:?}", self_ptr)
// Looks up the type assigned to a local/pattern binding; E0513 if the
// gather pass never recorded one. (Match arms partially elided here.)
1515 pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> Ty<'tcx> {
1516 match self.locals.borrow().get(&nid) {
1519 span_err!(self.tcx.sess, span, E0513,
1520 "no type for local variable {}",
// Records the type of a node in the side tables. For `!`-typed expressions
// an AdjustNeverToAny adjustment with a fresh diverging type variable is
// also recorded (condition on `ty` partially elided in this excerpt).
1528 pub fn write_ty(&self, node_id: ast::NodeId, ty: Ty<'tcx>) {
1529 debug!("write_ty({}, {:?}) in fcx {}",
1530 node_id, ty, self.tag());
1531 self.tables.borrow_mut().node_types.insert(node_id, ty);
1533 // Add adjustments to !-expressions
1535 if let Some(hir::map::NodeExpr(_)) = self.tcx.map.find(node_id) {
1536 let adj = adjustment::AdjustNeverToAny(self.next_diverging_ty_var());
1537 self.write_adjustment(node_id, adj);
// Records item substitutions for a node, skipping no-op substitutions.
1542 pub fn write_substs(&self, node_id: ast::NodeId, substs: ty::ItemSubsts<'tcx>) {
1543 if !substs.substs.is_noop() {
1544 debug!("write_substs({}, {:?}) in fcx {}",
1549 self.tables.borrow_mut().item_substs.insert(node_id, substs);
// Records an autoderef adjustment (derefs parameter line elided here).
1553 pub fn write_autoderef_adjustment(&self,
1554 node_id: ast::NodeId,
1556 self.write_adjustment(
1558 adjustment::AdjustDerefRef(adjustment::AutoDerefRef {
// Records an arbitrary adjustment, dropping identity adjustments
// (the early `return` after the identity check is elided here).
1566 pub fn write_adjustment(&self,
1567 node_id: ast::NodeId,
1568 adj: adjustment::AutoAdjustment<'tcx>) {
1569 debug!("write_adjustment(node_id={}, adj={:?})", node_id, adj);
1571 if adj.is_identity() {
1575 self.tables.borrow_mut().adjustments.insert(node_id, adj);
1578 /// Basically whenever we are converting from a type scheme into
1579 /// the fn body space, we always want to normalize associated
1580 /// types as well. This function combines the two.
1581 fn instantiate_type_scheme<T>(&self,
1583 substs: &Substs<'tcx>,
1586 where T : TypeFoldable<'tcx>
1588 let value = value.subst(self.tcx, substs);
1589 let result = self.normalize_associated_types_in(span, &value);
1590 debug!("instantiate_type_scheme(value={:?}, substs={:?}) = {:?}",
1597 /// As `instantiate_type_scheme`, but for the bounds found in a
1598 /// generic type scheme.
1599 fn instantiate_bounds(&self,
1601 substs: &Substs<'tcx>,
1602 bounds: &ty::GenericPredicates<'tcx>)
1603 -> ty::InstantiatedPredicates<'tcx>
1605 let result = bounds.instantiate(self.tcx, substs);
1606 let result = self.normalize_associated_types_in(span, &result.predicates);
1607 debug!("instantiate_bounds(bounds={:?}, substs={:?}) = {:?}",
1611 ty::InstantiatedPredicates {
1616 /// Replace all anonymized types with fresh inference variables
1617 /// and record them for writeback.
// NOTE(review): several lines of this method (early returns, closing braces
// of the folder closure) are elided from this excerpt.
1618 fn instantiate_anon_types<T: TypeFoldable<'tcx>>(&self, value: &T) -> T {
1619 value.fold_with(&mut BottomUpFolder { tcx: self.tcx, fldop: |ty| {
1620 if let ty::TyAnon(def_id, substs) = ty.sty {
1621 // Use the same type variable if the exact same TyAnon appears more
1622 // than once in the return type (e.g. if it's passed to a type alias).
1623 if let Some(ty_var) = self.anon_types.borrow().get(&def_id) {
1626 let ty_var = self.next_ty_var();
1627 self.anon_types.borrow_mut().insert(def_id, ty_var);
// The `impl Trait` bounds become obligations on the fresh type variable.
1629 let item_predicates = self.tcx.lookup_predicates(def_id);
1630 let bounds = item_predicates.instantiate(self.tcx, substs);
1632 let span = self.tcx.map.def_id_span(def_id, codemap::DUMMY_SP);
1633 for predicate in bounds.predicates {
1634 // Change the predicate to refer to the type variable,
1635 // which will be the concrete type, instead of the TyAnon.
1636 // This also instantiates nested `impl Trait`.
1637 let predicate = self.instantiate_anon_types(&predicate);
1639 // Require that the predicate holds for the concrete type.
1640 let cause = traits::ObligationCause::new(span, self.body_id,
1641 traits::ReturnType);
1642 self.register_predicate(traits::Obligation::new(cause, predicate));
// Thin wrapper delegating to the Inherited context's normalizer.
1652 fn normalize_associated_types_in<T>(&self, span: Span, value: &T) -> T
1653 where T : TypeFoldable<'tcx>
1655 self.inh.normalize_associated_types_in(span, self.body_id, value)
// Normalizes a single associated-type projection through the fulfillment
// context (span parameter and surrounding lines partially elided here).
1658 fn normalize_associated_type(&self,
1660 trait_ref: ty::TraitRef<'tcx>,
1661 item_name: ast::Name)
1664 let cause = traits::ObligationCause::new(span,
1666 traits::ObligationCauseCode::MiscObligation);
1669 .normalize_projection_type(self,
1671 trait_ref: trait_ref,
1672 item_name: item_name,
1677 /// Instantiates the type in `did` with the generics in `path` and returns
1678 /// it (registering the necessary trait obligations along the way).
1680 /// Note that this function is only intended to be used with type-paths,
1681 /// not with value-paths.
// NOTE(review): `path` parameter line and the tuple-variant condition
// guarding line 1691 are elided from this excerpt.
1682 pub fn instantiate_type_path(&self,
1685 node_id: ast::NodeId)
1687 debug!("instantiate_type_path(did={:?}, path={:?})", did, path);
1688 let mut ty = self.tcx.lookup_item_type(did).ty;
1690 // Tuple variants have fn type even in type namespace, extract true variant type from it
1691 ty = self.tcx.no_late_bound_regions(&ty.fn_ret()).unwrap();
1693 let type_predicates = self.tcx.lookup_predicates(did);
1694 let substs = AstConv::ast_path_substs_for_ty(self, self,
1696 PathParamMode::Optional,
1698 path.segments.last().unwrap());
1699 debug!("instantiate_type_path: ty={:?} substs={:?}", ty, substs);
1700 let bounds = self.instantiate_bounds(path.span, substs, &type_predicates);
1701 let cause = traits::ObligationCause::new(path.span, self.body_id,
1702 traits::ItemObligation(did));
1703 self.add_obligations_for_parameters(cause, &bounds);
// Record both the substituted type and the substitutions for writeback.
1705 let ty_substituted = self.instantiate_type_scheme(path.span, substs, &ty);
1706 self.write_ty(node_id, ty_substituted);
1707 self.write_substs(node_id, ty::ItemSubsts {
// Convenience writers for the unit, never, and error types.
1713 pub fn write_nil(&self, node_id: ast::NodeId) {
1714 self.write_ty(node_id, self.tcx.mk_nil());
1717 pub fn write_never(&self, node_id: ast::NodeId) {
1718 self.write_ty(node_id, self.tcx.types.never);
1721 pub fn write_error(&self, node_id: ast::NodeId) {
1722 self.write_ty(node_id, self.tcx.types.err);
// Requires that `ty` meets the given builtin bound, via the fulfillment
// context. (`ty`/`span` parameter lines are elided from this excerpt.)
1725 pub fn require_type_meets(&self,
1728 code: traits::ObligationCauseCode<'tcx>,
1729 bound: ty::BuiltinBound)
1731 self.register_builtin_bound(
1734 traits::ObligationCause::new(span, self.body_id, code));
// Convenience: require `ty: Sized`.
1737 pub fn require_type_is_sized(&self,
1740 code: traits::ObligationCauseCode<'tcx>)
1742 self.require_type_meets(ty, span, code, ty::BoundSized);
// Convenience: require the type of `expr` to be Sized.
1745 pub fn require_expr_have_sized_type(&self,
1747 code: traits::ObligationCauseCode<'tcx>)
1749 self.require_type_is_sized(self.expr_ty(expr), expr.span, code);
1752 pub fn register_builtin_bound(&self,
1754 builtin_bound: ty::BuiltinBound,
1755 cause: traits::ObligationCause<'tcx>)
1757 self.fulfillment_cx.borrow_mut()
1758 .register_builtin_bound(self, ty, builtin_bound, cause);
// Registers a predicate obligation with the fulfillment context.
1761 pub fn register_predicate(&self,
1762 obligation: traits::PredicateObligation<'tcx>)
1764 debug!("register_predicate({:?})",
1768 .register_predicate_obligation(self, obligation);
// Converts an AST type to a semantic type and registers a WF obligation
// for it. (The trailing `t` return expression is elided here.)
1771 pub fn to_ty(&self, ast_t: &hir::Ty) -> Ty<'tcx> {
1772 let t = AstConv::ast_ty_to_ty(self, self, ast_t);
1773 self.register_wf_obligation(t, ast_t.span, traits::MiscObligation);
// Type of an expression: a recorded AdjustNeverToAny adjustment takes
// precedence over the node_types table; missing entries are a bug.
1777 pub fn expr_ty(&self, ex: &hir::Expr) -> Ty<'tcx> {
1778 if let Some(&adjustment::AdjustNeverToAny(ref t))
1779 = self.tables.borrow().adjustments.get(&ex.id) {
1782 match self.tables.borrow().node_types.get(&ex.id) {
1785 bug!("no type for expr in fcx {}", self.tag());
1790 /// Apply `adjustment` to the type of `expr`
1791 pub fn adjust_expr_ty(&self,
1793 adjustment: Option<&adjustment::AutoAdjustment<'tcx>>)
1796 let raw_ty = self.expr_ty(expr);
1797 let raw_ty = self.shallow_resolve(raw_ty);
1798 let resolve_ty = |ty: Ty<'tcx>| self.resolve_type_vars_if_possible(&ty);
1799 raw_ty.adjust(self.tcx, expr.span, expr.id, adjustment, |method_call| {
1800 self.tables.borrow().method_map.get(&method_call)
1801 .map(|method| resolve_ty(method.ty))
// Type of an arbitrary node; after earlier errors a missing entry degrades
// to `err` instead of ICEing.
1805 pub fn node_ty(&self, id: ast::NodeId) -> Ty<'tcx> {
1806 match self.tables.borrow().node_types.get(&id) {
1808 None if self.err_count_since_creation() != 0 => self.tcx.types.err,
1810 bug!("no type for node {}: {} in fcx {}",
1811 id, self.tcx.map.node_to_string(id),
// Borrow of the item_substs table; the inner fn works around a borrowck
// limitation ("susbts" typo is in the identifier itself, kept as-is since
// it is code, not comment).
1817 pub fn item_substs(&self) -> Ref<NodeMap<ty::ItemSubsts<'tcx>>> {
1818 // NOTE: @jroesch this is hack that appears to be fixed on nightly, will monitor if
1819 // it changes when we upgrade the snapshot compiler
1820 fn project_item_susbts<'a, 'tcx>(tables: &'a ty::Tables<'tcx>)
1821 -> &'a NodeMap<ty::ItemSubsts<'tcx>> {
1825 Ref::map(self.tables.borrow(), project_item_susbts)
// Invokes `f` with the node's ItemSubsts if any were recorded.
1828 pub fn opt_node_ty_substs<F>(&self,
1831 F: FnOnce(&ty::ItemSubsts<'tcx>),
1833 match self.tables.borrow().item_substs.get(&id) {
1839 /// Registers an obligation for checking later, during regionck, that the type `ty` must
1840 /// outlive the region `r`.
1841 pub fn register_region_obligation(&self,
1843 region: &'tcx ty::Region,
1844 cause: traits::ObligationCause<'tcx>)
1846 let mut fulfillment_cx = self.fulfillment_cx.borrow_mut();
1847 fulfillment_cx.register_region_obligation(ty, region, cause);
1850 /// Registers an obligation that the type `ty` is well-formed. (The previous
1851 /// doc comment here was a copy-paste of the outlives one above; this method
/// registers a `WellFormed(ty)` predicate, not a region obligation.)
1852 pub fn register_wf_obligation(&self,
1855 code: traits::ObligationCauseCode<'tcx>)
1857 // WF obligations never themselves fail, so no real need to give a detailed cause:
1858 let cause = traits::ObligationCause::new(span, self.body_id, code);
1859 self.register_predicate(traits::Obligation::new(cause, ty::Predicate::WellFormed(ty)));
1862 pub fn register_old_wf_obligation(&self,
1865 code: traits::ObligationCauseCode<'tcx>)
1867 // Registers an "old-style" WF obligation that uses the
1868 // implicator code. This is basically a buggy version of
1869 // `register_wf_obligation` that is being kept around
1870 // temporarily just to help with phasing in the newer rules.
1872 // FIXME(#27579) all uses of this should be migrated to register_wf_obligation eventually
1873 let cause = traits::ObligationCause::new(span, self.body_id, code);
1874 self.register_region_obligation(ty, self.tcx.mk_region(ty::ReEmpty), cause);
1877 /// Registers obligations that all types appearing in `substs` are well-formed.
1878 pub fn add_wf_bounds(&self, substs: &Substs<'tcx>, expr: &hir::Expr)
1880 for ty in substs.types() {
1881 self.register_wf_obligation(ty, expr.span, traits::MiscObligation);
1885 /// Given a fully substituted set of bounds (`generic_bounds`), and the values with which each
1886 /// type/region parameter was instantiated (`substs`), creates and registers suitable
1887 /// trait/region obligations.
1889 /// For example, if there is a function:
1892 /// fn foo<'a,T:'a>(...)
1895 /// and a reference:
1901 /// Then we will create a fresh region variable `'$0` and a fresh type variable `$1` for `'a`
1902 /// and `T`. This routine will add a region obligation `$1:'$0` and register it locally.
1903 pub fn add_obligations_for_parameters(&self,
1904 cause: traits::ObligationCause<'tcx>,
1905 predicates: &ty::InstantiatedPredicates<'tcx>)
1907 assert!(!predicates.has_escaping_regions());
1909 debug!("add_obligations_for_parameters(predicates={:?})",
1912 for obligation in traits::predicates_for_generics(cause, predicates) {
1913 self.register_predicate(obligation);
1917 // FIXME(arielb1): use this instead of field.ty everywhere
1918 // Only for fields! Returns <none> for methods>
1919 // Indifferent to privacy flags
// Field type with substitutions applied and associated types normalized.
// (`span` parameter line and return type are elided from this excerpt.)
1920 pub fn field_ty(&self,
1922 field: ty::FieldDef<'tcx>,
1923 substs: &Substs<'tcx>)
1926 self.normalize_associated_types_in(span,
1927 &field.ty(self.tcx, substs))
// Runs all deferred cast checks, draining the queue. (The per-cast call in
// the loop body and closing braces are elided from this excerpt.)
1930 fn check_casts(&self) {
1931 let mut deferred_cast_checks = self.deferred_cast_checks.borrow_mut();
1932 for cast in deferred_cast_checks.drain(..) {
1937 /// Apply "fallbacks" to some types
1938 /// ! gets replaced with (), unconstrained ints with i32, and unconstrained floats with f64.
// NOTE(review): several lines (loop closers, the `Neither` arm, debug!
// argument continuations) are elided from this excerpt — gaps in the
// embedded numbering.
1939 fn default_type_parameters(&self) {
1940 use rustc::ty::error::UnconstrainedNumeric::Neither;
1941 use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1943 // Defaulting inference variables becomes very dubious if we have
1944 // encountered type-checking errors. Therefore, if we think we saw
1945 // some errors in this function, just resolve all uninstantiated type
1946 // variables to TyError.
1947 if self.is_tainted_by_errors() {
1948 for ty in &self.unsolved_variables() {
1949 if let ty::TyInfer(_) = self.shallow_resolve(ty).sty {
1950 debug!("default_type_parameters: defaulting `{:?}` to error", ty);
1951 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx().types.err);
// Otherwise apply the standard fallbacks: diverging variables default to
// the diverging default, unconstrained integers to i32, floats to f64.
1957 for ty in &self.unsolved_variables() {
1958 let resolved = self.resolve_type_vars_if_possible(ty);
1959 if self.type_var_diverges(resolved) {
1960 debug!("default_type_parameters: defaulting `{:?}` to `!` because it diverges",
1962 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
1963 self.tcx.mk_diverging_default());
1965 match self.type_is_unconstrained_numeric(resolved) {
1966 UnconstrainedInt => {
1967 debug!("default_type_parameters: defaulting `{:?}` to `i32`",
1969 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
1971 UnconstrainedFloat => {
// BUGFIX(review): the debug message claimed `f32`, but the unification
// below (and the doc header above) uses `f64`.
1972 debug!("default_type_parameters: defaulting `{:?}` to `f64`",
1974 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
// Dispatches between the old and new (feature-gated) type-parameter
// fallback algorithms. (`else` line and closing braces elided here.)
1982 fn select_all_obligations_and_apply_defaults(&self) {
1983 if self.tcx.sess.features.borrow().default_type_parameter_fallback {
1984 self.new_select_all_obligations_and_apply_defaults();
1986 self.old_select_all_obligations_and_apply_defaults();
1990 // Implements old type inference fallback algorithm
1991 fn old_select_all_obligations_and_apply_defaults(&self) {
// Select, apply numeric/diverging fallbacks, then select again so the
// newly defaulted variables can unblock remaining obligations.
1992 self.select_obligations_where_possible();
1993 self.default_type_parameters();
1994 self.select_obligations_where_possible();
// New (feature-gated) fallback algorithm for default type parameters:
// iteratively selects obligations, collects unsolved variables and their
// attached defaults, applies fallbacks inside a rollbackable snapshot, and
// reports conflicting defaults. Bounded by the compiler's recursion limit.
// NOTE(review): many lines (loop/closure closers, early breaks/returns,
// match arms, some argument continuations) are elided from this excerpt.
1997 fn new_select_all_obligations_and_apply_defaults(&self) {
1998 use rustc::ty::error::UnconstrainedNumeric::Neither;
1999 use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
2001 // For the time being this errs on the side of being memory wasteful but provides better
2003 // let type_variables = self.type_variables.clone();
2005 // There is a possibility that this algorithm will have to run an arbitrary number of times
2006 // to terminate so we bound it by the compiler's recursion limit.
2007 for _ in 0..self.tcx.sess.recursion_limit.get() {
2008 // First we try to solve all obligations, it is possible that the last iteration
2009 // has made it possible to make more progress.
2010 self.select_obligations_where_possible();
2012 let mut conflicts = Vec::new();
2014 // Collect all unsolved type, integral and floating point variables.
2015 let unsolved_variables = self.unsolved_variables();
2017 // We must collect the defaults *before* we do any unification. Because we have
2018 // directly attached defaults to the type variables any unification that occurs
2019 // will erase defaults causing conflicting defaults to be completely ignored.
2020 let default_map: FnvHashMap<_, _> =
2023 .filter_map(|t| self.default(t).map(|d| (t, d)))
2026 let mut unbound_tyvars = FnvHashSet();
2028 debug!("select_all_obligations_and_apply_defaults: defaults={:?}", default_map);
2030 // We loop over the unsolved variables, resolving them and if they are
2031 // an unconstrained numeric type we add them to the set of unbound
2032 // variables. We do this so we only apply literal fallback to type
2033 // variables without defaults.
2034 for ty in &unsolved_variables {
2035 let resolved = self.resolve_type_vars_if_possible(ty);
2036 if self.type_var_diverges(resolved) {
2037 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
2038 self.tcx.mk_diverging_default());
2040 match self.type_is_unconstrained_numeric(resolved) {
2041 UnconstrainedInt | UnconstrainedFloat => {
2042 unbound_tyvars.insert(resolved);
2049 // We now remove any numeric types that also have defaults, and instead insert
2050 // the type variable with a defined fallback.
2051 for ty in &unsolved_variables {
2052 if let Some(_default) = default_map.get(ty) {
2053 let resolved = self.resolve_type_vars_if_possible(ty);
2055 debug!("select_all_obligations_and_apply_defaults: \
2056 ty: {:?} with default: {:?}",
2059 match resolved.sty {
2060 ty::TyInfer(ty::TyVar(_)) => {
2061 unbound_tyvars.insert(ty);
// A numeric var with a default replaces the plain numeric-fallback entry.
2064 ty::TyInfer(ty::IntVar(_)) | ty::TyInfer(ty::FloatVar(_)) => {
2065 unbound_tyvars.insert(ty);
2066 if unbound_tyvars.contains(resolved) {
2067 unbound_tyvars.remove(resolved);
2076 // If there are no more fallbacks to apply at this point we have applied all possible
2077 // defaults and type inference will proceed as normal.
2078 if unbound_tyvars.is_empty() {
2082 // Finally we go through each of the unbound type variables and unify them with
2083 // the proper fallback, reporting a conflicting default error if any of the
2084 // unifications fail. We know it must be a conflicting default because the
2085 // variable would only be in `unbound_tyvars` and have a concrete value if
2086 // it had been solved by previously applying a default.
2088 // We wrap this in a transaction for error reporting, if we detect a conflict
2089 // we will rollback the inference context to its prior state so we can probe
2090 // for conflicts and correctly report them.
2093 let _ = self.commit_if_ok(|_: &infer::CombinedSnapshot| {
2094 for ty in &unbound_tyvars {
2095 if self.type_var_diverges(ty) {
2096 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
2097 self.tcx.mk_diverging_default());
2099 match self.type_is_unconstrained_numeric(ty) {
2100 UnconstrainedInt => {
2101 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
2103 UnconstrainedFloat => {
2104 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
// Variables with attached defaults are unified with those defaults;
// failures are recorded as conflicts rather than reported immediately.
2107 if let Some(default) = default_map.get(ty) {
2108 let default = default.clone();
2109 match self.eq_types(false,
2110 TypeOrigin::Misc(default.origin_span),
2112 Ok(InferOk { obligations, .. }) => {
2113 // FIXME(#32730) propagate obligations
2114 assert!(obligations.is_empty())
2117 conflicts.push((*ty, default));
2126 // If there are conflicts we rollback, otherwise commit
2127 if conflicts.len() > 0 {
2134 if conflicts.len() > 0 {
2135 // Loop through each conflicting default, figuring out the default that caused
2136 // a unification failure and then report an error for each.
2137 for (conflict, default) in conflicts {
2138 let conflicting_default =
2139 self.find_conflicting_default(&unbound_tyvars, &default_map, conflict)
2140 .unwrap_or(type_variable::Default {
2141 ty: self.next_ty_var(),
2142 origin_span: syntax_pos::DUMMY_SP,
2143 def_id: self.tcx.map.local_def_id(0) // what do I put here?
2146 // This is to ensure that we eliminate any non-determinism from the error
2147 // reporting by fixing an order, it doesn't matter what order we choose
2148 // just that it is consistent.
2149 let (first_default, second_default) =
2150 if default.def_id < conflicting_default.def_id {
2151 (default, conflicting_default)
2153 (conflicting_default, default)
2157 self.report_conflicting_default_types(
2158 first_default.origin_span,
// One more selection pass after defaults were applied.
2165 self.select_obligations_where_possible();
2168 // For use in error handling related to default type parameter fallback. We explicitly
2169 // apply the default that caused conflict first to a local version of the type variable
2170 // table then apply defaults until we find a conflict. That default must be the one
2171 // that caused conflict earlier.
//
// Returns the `Default` whose application reproduces the unification failure,
// or `None` if re-applying defaults in this order produces no conflict.
// NOTE(review): the `conflict` parameter line is elided in this excerpt — it is
// referenced below; confirm against the full source.
2172 fn find_conflicting_default(&self,
2173 unbound_vars: &FnvHashSet<Ty<'tcx>>,
2174 default_map: &FnvHashMap<&Ty<'tcx>, type_variable::Default<'tcx>>,
2176 -> Option<type_variable::Default<'tcx>> {
2177 use rustc::ty::error::UnconstrainedNumeric::Neither;
2178 use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
2180 // Ensure that we apply the conflicting default first
2181 let mut unbound_tyvars = Vec::with_capacity(unbound_vars.len() + 1);
2182 unbound_tyvars.push(conflict);
2183 unbound_tyvars.extend(unbound_vars.iter());
2185 let mut result = None;
2186 // We run the same code as above applying defaults in order, this time when
2187 // we find the conflict we just return it for error reporting above.
2189 // We also run this inside a snapshot that never commits so we can do error
2190 // reporting for more than one conflict.
2191 for ty in &unbound_tyvars {
// Diverging type variables are unified with the diverging default.
2192 if self.type_var_diverges(ty) {
2193 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
2194 self.tcx.mk_diverging_default());
2196 match self.type_is_unconstrained_numeric(ty) {
// Unconstrained integer literals fall back to `i32`.
2197 UnconstrainedInt => {
2198 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
// Unconstrained float literals fall back to `f64`.
2200 UnconstrainedFloat => {
2201 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
2204 if let Some(default) = default_map.get(ty) {
2205 let default = default.clone();
2206 match self.eq_types(false,
2207 TypeOrigin::Misc(default.origin_span),
2209 // FIXME(#32730) propagate obligations
2210 Ok(InferOk { obligations, .. }) => assert!(obligations.is_empty()),
// Unification failed: this default is the culprit we report.
2212 result = Some(default);
// Final trait-obligation pass for the function body: applies numeric/default
// fallback, then selects every pending obligation, reporting any that fail.
2224 fn select_all_obligations_or_error(&self) {
2225 debug!("select_all_obligations_or_error");
2227 // upvar inference should have ensured that all deferred call
2228 // resolutions are handled by now.
2229 assert!(self.deferred_call_resolutions.borrow().is_empty());
2231 self.select_all_obligations_and_apply_defaults();
2233 let mut fulfillment_cx = self.fulfillment_cx.borrow_mut();
2235 // Steal the deferred obligations before the fulfillment
2236 // context can turn all of them into errors.
2237 let obligations = fulfillment_cx.take_deferred_obligations();
2238 self.deferred_obligations.borrow_mut().extend(obligations);
2240 match fulfillment_cx.select_all_or_error(self) {
// Errors are reported here; the Ok arm is elided in this excerpt.
2242 Err(errors) => { self.report_fulfillment_errors(&errors); }
2246 /// Select as many obligations as we can at present.
2247 fn select_obligations_where_possible(&self) {
2248 match self.fulfillment_cx.borrow_mut().select_where_possible(self) {
// Partial selection is fine; only hard errors are reported.
2250 Err(errors) => { self.report_fulfillment_errors(&errors); }
2254 /// For the overloaded lvalue expressions (`*x`, `x[3]`), the trait
2255 /// returns a type of `&T`, but the actual type we assign to the
2256 /// *expression* is `T`. So this function just peels off the return
2257 /// type by one layer to yield `T`.
2258 fn make_overloaded_lvalue_return_type(&self,
2259 method: MethodCallee<'tcx>)
2260 -> ty::TypeAndMut<'tcx>
2262 // extract method return type, which will be &T;
2263 // all LB regions should have been instantiated during method lookup
2264 let ret_ty = method.ty.fn_ret();
// `unwrap` is safe per the invariant above: no late-bound regions remain.
2265 let ret_ty = self.tcx.no_late_bound_regions(&ret_ty).unwrap();
2267 // method returns &T, but the type as visible to user is T, so deref
2268 ret_ty.builtin_deref(true, NoPreference).unwrap()
// Resolves `base_expr[idx]` by autoderef-ing the base type, trying one
// indexing step at each level; returns the (index type, element type) pair
// on success. Some parameter lines are elided in this excerpt.
2271 fn lookup_indexing(&self,
2273 base_expr: &'gcx hir::Expr,
2276 lvalue_pref: LvaluePreference)
2277 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2279 // FIXME(#18741) -- this is almost but not quite the same as the
2280 // autoderef that normal method probing does. They could likely be
2283 let mut autoderef = self.autoderef(base_expr.span, base_ty);
2285 while let Some((adj_ty, autoderefs)) = autoderef.next() {
2286 if let Some(final_mt) = self.try_index_step(
2287 MethodCall::expr(expr.id),
2288 expr, base_expr, adj_ty, autoderefs,
2289 false, lvalue_pref, idx_ty)
// Success at this deref level: commit the adjustments and stop.
2291 autoderef.finalize(lvalue_pref, Some(base_expr));
2292 return Some(final_mt);
// Arrays get one extra chance after unsizing `[T; N]` to `[T]`.
2295 if let ty::TyArray(element_ty, _) = adj_ty.sty {
2296 autoderef.finalize(lvalue_pref, Some(base_expr));
2297 let adjusted_ty = self.tcx.mk_slice(element_ty);
2298 return self.try_index_step(
2299 MethodCall::expr(expr.id), expr, base_expr,
2300 adjusted_ty, autoderefs, true, lvalue_pref, idx_ty);
// Exhausted all deref levels without finding an indexable type.
2303 autoderef.unambiguous_final_ty();
2307 /// To type-check `base_expr[index_expr]`, we progressively autoderef
2308 /// (and otherwise adjust) `base_expr`, looking for a type which either
2309 /// supports builtin indexing or overloaded indexing.
2310 /// This loop implements one step in that search; the autoderef loop
2311 /// is implemented by `lookup_indexing`.
///
/// Returns `Some((index_ty, element_ty))` when this step resolves the
/// indexing, `None` otherwise. Several parameter lines are elided here.
2312 fn try_index_step(&self,
2313 method_call: MethodCall,
2315 base_expr: &'gcx hir::Expr,
2316 adjusted_ty: Ty<'tcx>,
2319 lvalue_pref: LvaluePreference,
2321 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2324 debug!("try_index_step(expr={:?}, base_expr.id={:?}, adjusted_ty={:?}, \
2325 autoderefs={}, unsize={}, index_ty={:?})",
// Fresh variable for the index type fed to the trait lookups below.
2333 let input_ty = self.next_ty_var();
2335 // First, try built-in indexing.
2336 match (adjusted_ty.builtin_index(), &index_ty.sty) {
2337 (Some(ty), &ty::TyUint(ast::UintTy::Us)) | (Some(ty), &ty::TyInfer(ty::IntVar(_))) => {
2338 debug!("try_index_step: success, using built-in indexing");
2339 // If we had `[T; N]`, we should've caught it before unsizing to `[T]`.
2341 self.write_autoderef_adjustment(base_expr.id, autoderefs);
2342 return Some((tcx.types.usize, ty));
2347 // Try `IndexMut` first, if preferred.
2348 let method = match (lvalue_pref, tcx.lang_items.index_mut_trait()) {
2349 (PreferMutLvalue, Some(trait_did)) => {
2350 self.lookup_method_in_trait_adjusted(expr.span,
2352 token::intern("index_mut"),
2357 Some(vec![input_ty]))
2362 // Otherwise, fall back to `Index`.
2363 let method = match (method, tcx.lang_items.index_trait()) {
2364 (None, Some(trait_did)) => {
2365 self.lookup_method_in_trait_adjusted(expr.span,
2367 token::intern("index"),
2372 Some(vec![input_ty]))
2374 (method, _) => method,
2377 // If some lookup succeeds, write callee into table and extract index/element
2378 // type from the method signature.
2379 // If some lookup succeeded, install method in table
2380 method.map(|method| {
2381 debug!("try_index_step: success, using overloaded indexing");
2382 self.tables.borrow_mut().method_map.insert(method_call, method);
2383 (input_ty, self.make_overloaded_lvalue_return_type(method).ty)
// Type-checks the arguments of a method call (receiver excluded), delegating
// to `check_argument_types`; on an erroneous callee type it substitutes error
// types so checking can continue. Some parameter lines are elided here.
2387 fn check_method_argument_types(&self,
2389 method_fn_ty: Ty<'tcx>,
2390 callee_expr: &'gcx hir::Expr,
2391 args_no_rcvr: &'gcx [P<hir::Expr>],
2392 tuple_arguments: TupleArgumentsFlag,
2393 expected: Expectation<'tcx>)
// Error-recovery path: still check each argument against `err` types.
2395 if method_fn_ty.references_error() {
2396 let err_inputs = self.err_args(args_no_rcvr.len());
2398 let err_inputs = match tuple_arguments {
2399 DontTupleArguments => err_inputs,
2400 TupleArguments => vec![self.tcx.mk_tup(err_inputs)],
2403 self.check_argument_types(sp, &err_inputs[..], &[], args_no_rcvr,
2404 false, tuple_arguments);
2407 match method_fn_ty.sty {
2408 ty::TyFnDef(_, _, ref fty) => {
2409 // HACK(eddyb) ignore self in the definition (see above).
2410 let expected_arg_tys = self.expected_types_for_fn_args(sp, expected,
2412 &fty.sig.0.inputs[1..]);
2413 self.check_argument_types(sp, &fty.sig.0.inputs[1..], &expected_arg_tys[..],
2414 args_no_rcvr, fty.sig.0.variadic, tuple_arguments);
// A resolved method must have a bare fn type; anything else is a compiler bug.
2418 span_bug!(callee_expr.span, "method without bare fn type");
2424 /// Generic function that factors out common logic from function calls,
2425 /// method calls and overloaded operators.
///
/// Checks each supplied argument against the formal parameter types,
/// reporting arity mismatches (E0057/E0060/E0061-family), handling the
/// `Fn`-trait tupled-argument calling convention, and enforcing the C
/// varargs promotion rules. Several lines are elided in this excerpt.
2426 fn check_argument_types(&self,
2428 fn_inputs: &[Ty<'tcx>],
2429 expected_arg_tys: &[Ty<'tcx>],
2430 args: &'gcx [P<hir::Expr>],
2432 tuple_arguments: TupleArgumentsFlag) {
2435 // Grab the argument types, supplying fresh type variables
2436 // if the wrong number of arguments were supplied
2437 let supplied_arg_count = if tuple_arguments == DontTupleArguments {
2443 // All the input types from the fn signature must outlive the call
2444 // so as to validate implied bounds.
2445 for &fn_input_ty in fn_inputs {
2446 self.register_wf_obligation(fn_input_ty, sp, traits::MiscObligation);
2449 let mut expected_arg_tys = expected_arg_tys;
2450 let expected_arg_count = fn_inputs.len();
// Local helper: emits the arity-mismatch diagnostic, pluralizing the
// message and listing the expected parameter types.
2452 fn parameter_count_error<'tcx>(sess: &Session, sp: Span, fn_inputs: &[Ty<'tcx>],
2453 expected_count: usize, arg_count: usize, error_code: &str,
2455 let mut err = sess.struct_span_err_with_code(sp,
2456 &format!("this function takes {}{} parameter{} but {} parameter{} supplied",
2457 if variadic {"at least "} else {""},
2459 if expected_count == 1 {""} else {"s"},
2461 if arg_count == 1 {" was"} else {"s were"}),
2464 err.span_label(sp, &format!("expected {}{} parameter{}",
2465 if variadic {"at least "} else {""},
2467 if expected_count == 1 {""} else {"s"}));
2469 let input_types = fn_inputs.iter().map(|i| format!("{:?}", i)).collect::<Vec<String>>();
2470 if input_types.len() > 0 {
2471 err.note(&format!("the following parameter type{} expected: {}",
2472 if expected_count == 1 {" was"} else {"s were"},
2473 input_types.join(", ")));
// Tupled arguments (`Fn`-trait call sugar): the single formal input must be
// a tuple whose arity matches the supplied argument list.
2478 let formal_tys = if tuple_arguments == TupleArguments {
2479 let tuple_type = self.structurally_resolved_type(sp, fn_inputs[0]);
2480 match tuple_type.sty {
2481 ty::TyTuple(arg_types) if arg_types.len() != args.len() => {
2482 parameter_count_error(tcx.sess, sp, fn_inputs, arg_types.len(), args.len(),
2484 expected_arg_tys = &[];
2485 self.err_args(args.len())
2487 ty::TyTuple(arg_types) => {
2488 expected_arg_tys = match expected_arg_tys.get(0) {
2489 Some(&ty) => match ty.sty {
2490 ty::TyTuple(ref tys) => &tys,
2498 span_err!(tcx.sess, sp, E0059,
2499 "cannot use call notation; the first type parameter \
2500 for the function trait is neither a tuple nor unit");
2501 expected_arg_tys = &[];
2502 self.err_args(args.len())
2505 } else if expected_arg_count == supplied_arg_count {
// Variadic calls may supply extra arguments, but never fewer than declared.
2507 } else if variadic {
2508 if supplied_arg_count >= expected_arg_count {
2511 parameter_count_error(tcx.sess, sp, fn_inputs, expected_arg_count,
2512 supplied_arg_count, "E0060", true);
2513 expected_arg_tys = &[];
2514 self.err_args(supplied_arg_count)
2517 parameter_count_error(tcx.sess, sp, fn_inputs, expected_arg_count, supplied_arg_count,
2519 expected_arg_tys = &[];
2520 self.err_args(supplied_arg_count)
2523 debug!("check_argument_types: formal_tys={:?}",
2524 formal_tys.iter().map(|t| self.ty_to_string(*t)).collect::<Vec<String>>());
2526 // Check the arguments.
2527 // We do this in a pretty awful way: first we typecheck any arguments
2528 // that are not anonymous functions, then we typecheck the anonymous
2529 // functions. This is so that we have more information about the types
2530 // of arguments when we typecheck the functions. This isn't really the
2531 // right way to do this.
2532 let xs = [false, true];
2533 let mut any_diverges = false; // has any of the arguments diverged?
2534 let mut warned = false; // have we already warned about unreachable code?
// Two passes: first non-closure args (check_blocks == false), then closures.
2535 for check_blocks in &xs {
2536 let check_blocks = *check_blocks;
2537 debug!("check_blocks={}", check_blocks);
2539 // More awful hacks: before we check argument types, try to do
2540 // an "opportunistic" vtable resolution of any trait bounds on
2541 // the call. This helps coercions.
2543 self.select_obligations_where_possible();
2546 // For variadic functions, we don't have a declared type for all of
2547 // the arguments hence we only do our usual type checking with
2548 // the arguments whose types we do know.
2549 let t = if variadic {
2551 } else if tuple_arguments == TupleArguments {
2556 for (i, arg) in args.iter().take(t).enumerate() {
// Anything after a diverging argument is unreachable; lint once.
2557 if any_diverges && !warned {
2560 .add_lint(lint::builtin::UNREACHABLE_CODE,
2563 "unreachable expression".to_string());
2566 let is_block = match arg.node {
2567 hir::ExprClosure(..) => true,
2571 if is_block == check_blocks {
2572 debug!("checking the argument");
2573 let formal_ty = formal_tys[i];
2575 // The special-cased logic below has three functions:
2576 // 1. Provide as good of an expected type as possible.
2577 let expected = expected_arg_tys.get(i).map(|&ty| {
2578 Expectation::rvalue_hint(self, ty)
2581 self.check_expr_with_expectation(&arg,
2582 expected.unwrap_or(ExpectHasType(formal_ty)));
2583 // 2. Coerce to the most detailed type that could be coerced
2584 // to, which is `expected_ty` if `rvalue_hint` returns an
2585 // `ExpectHasType(expected_ty)`, or the `formal_ty` otherwise.
2586 let coerce_ty = expected.and_then(|e| e.only_has_type(self));
2587 self.demand_coerce(&arg, coerce_ty.unwrap_or(formal_ty));
2589 // 3. Relate the expected type and the formal one,
2590 // if the expected type was used for the coercion.
2591 coerce_ty.map(|ty| self.demand_suptype(arg.span, formal_ty, ty));
2594 if let Some(&arg_ty) = self.tables.borrow().node_types.get(&arg.id) {
2595 // FIXME(canndrew): This is_never should probably be an is_uninhabited
2596 any_diverges = any_diverges ||
2597 self.type_var_diverges(arg_ty) ||
2601 if any_diverges && !warned {
2602 let parent = self.tcx.map.get_parent_node(args[0].id);
2605 .add_lint(lint::builtin::UNREACHABLE_CODE,
2608 "unreachable call".to_string());
2614 // We also need to make sure we at least write the ty of the other
2615 // arguments which we skipped above.
2617 for arg in args.iter().skip(expected_arg_count) {
2618 self.check_expr(&arg);
2620 // There are a few types which get autopromoted when passed via varargs
2621 // in C but we just error out instead and require explicit casts.
2622 let arg_ty = self.structurally_resolved_type(arg.span,
2623 self.expr_ty(&arg));
2625 ty::TyFloat(ast::FloatTy::F32) => {
2626 self.type_error_message(arg.span, |t| {
2627 format!("can't pass an `{}` to variadic \
2628 function, cast to `c_double`", t)
2631 ty::TyInt(ast::IntTy::I8) | ty::TyInt(ast::IntTy::I16) | ty::TyBool => {
2632 self.type_error_message(arg.span, |t| {
2633 format!("can't pass `{}` to variadic \
2634 function, cast to `c_int`",
2638 ty::TyUint(ast::UintTy::U8) | ty::TyUint(ast::UintTy::U16) => {
2639 self.type_error_message(arg.span, |t| {
2640 format!("can't pass `{}` to variadic \
2641 function, cast to `c_uint`",
// Bare fn items must be cast to an explicit fn pointer before varargs use.
2645 ty::TyFnDef(_, _, f) => {
2646 let ptr_ty = self.tcx.mk_fn_ptr(f);
2647 let ptr_ty = self.resolve_type_vars_if_possible(&ptr_ty);
2648 self.type_error_message(arg.span,
2650 format!("can't pass `{}` to variadic \
2651 function, cast to `{}`", t, ptr_ty)
// Produces `len` copies of the error type, used as placeholder argument
// types during error recovery.
2660 fn err_args(&self, len: usize) -> Vec<Ty<'tcx>> {
2661 (0..len).map(|_| self.tcx.types.err).collect()
// Records `output` as the type of the call expression. The `output`
// parameter line is elided in this excerpt.
2664 fn write_call(&self,
2665 call_expr: &hir::Expr,
2667 self.write_ty(call_expr.id, output);
2670 // AST fragment checking
//
// NOTE(review): the fn header line is elided in this excerpt; this body maps
// a literal's kind to its type (presumably `check_lit`) — confirm against the
// full source. Unsuffixed numeric literals consult the expectation first and
// fall back to fresh int/float inference variables.
2673 expected: Expectation<'tcx>)
2679 ast::LitKind::Str(..) => tcx.mk_static_str(),
2680 ast::LitKind::ByteStr(ref v) => {
// Byte strings are `&'static [u8; N]`.
2681 tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic),
2682 tcx.mk_array(tcx.types.u8, v.len()))
2684 ast::LitKind::Byte(_) => tcx.types.u8,
2685 ast::LitKind::Char(_) => tcx.types.char,
2686 ast::LitKind::Int(_, ast::LitIntType::Signed(t)) => tcx.mk_mach_int(t),
2687 ast::LitKind::Int(_, ast::LitIntType::Unsigned(t)) => tcx.mk_mach_uint(t),
2688 ast::LitKind::Int(_, ast::LitIntType::Unsuffixed) => {
2689 let opt_ty = expected.to_option(self).and_then(|ty| {
2691 ty::TyInt(_) | ty::TyUint(_) => Some(ty),
2692 ty::TyChar => Some(tcx.types.u8),
2693 ty::TyRawPtr(..) => Some(tcx.types.usize),
2694 ty::TyFnDef(..) | ty::TyFnPtr(_) => Some(tcx.types.usize),
// No usable expectation: introduce a fresh integral inference variable.
2698 opt_ty.unwrap_or_else(
2699 || tcx.mk_int_var(self.next_int_var_id()))
2701 ast::LitKind::Float(_, t) => tcx.mk_mach_float(t),
2702 ast::LitKind::FloatUnsuffixed(_) => {
2703 let opt_ty = expected.to_option(self).and_then(|ty| {
2705 ty::TyFloat(_) => Some(ty),
2709 opt_ty.unwrap_or_else(
2710 || tcx.mk_float_var(self.next_float_var_id()))
2712 ast::LitKind::Bool(_) => tcx.types.bool
// Checks `expr` with `expected` as a hint, then demands exact type equality.
2716 fn check_expr_eq_type(&self,
2717 expr: &'gcx hir::Expr,
2718 expected: Ty<'tcx>) {
2719 self.check_expr_with_hint(expr, expected);
2720 self.demand_eqtype(expr.span, expected, self.expr_ty(expr));
// Checks `expr` with `expected` as a hint, then demands a subtyping
// relationship (expr's type must be a subtype of `expected`).
2723 pub fn check_expr_has_type(&self,
2724 expr: &'gcx hir::Expr,
2725 expected: Ty<'tcx>) {
2726 self.check_expr_with_hint(expr, expected);
2727 self.demand_suptype(expr.span, expected, self.expr_ty(expr));
// Checks `expr` with `expected` as a hint, then demands that its type be
// coercible to `expected` (weaker than subtyping: allows e.g. unsizing).
2730 fn check_expr_coercable_to_type(&self,
2731 expr: &'gcx hir::Expr,
2732 expected: Ty<'tcx>) {
2733 self.check_expr_with_hint(expr, expected);
2734 self.demand_coerce(expr, expected);
// Convenience wrapper: checks `expr` expecting exactly `expected`.
2737 fn check_expr_with_hint(&self, expr: &'gcx hir::Expr,
2738 expected: Ty<'tcx>) {
2739 self.check_expr_with_expectation(expr, ExpectHasType(expected))
// Convenience wrapper: checks `expr` with no lvalue preference.
2742 fn check_expr_with_expectation(&self,
2743 expr: &'gcx hir::Expr,
2744 expected: Expectation<'tcx>) {
2745 self.check_expr_with_expectation_and_lvalue_pref(expr, expected, NoPreference)
// Convenience wrapper: checks `expr` with no expectation at all.
2748 fn check_expr(&self, expr: &'gcx hir::Expr) {
2749 self.check_expr_with_expectation(expr, NoExpectation)
// Convenience wrapper: checks `expr` with an lvalue preference but no
// type expectation.
2752 fn check_expr_with_lvalue_pref(&self, expr: &'gcx hir::Expr,
2753 lvalue_pref: LvaluePreference) {
2754 self.check_expr_with_expectation_and_lvalue_pref(expr, NoExpectation, lvalue_pref)
2757 // determine the `self` type, using fresh variables for all variables
2758 // declared on the impl declaration e.g., `impl<A,B> for Vec<(A,B)>`
2759 // would return ($0, $1) where $0 and $1 are freshly instantiated type
//
// Returns the instantiated self type together with the fresh substitutions.
// (The `did` parameter line is elided in this excerpt.)
2761 pub fn impl_self_ty(&self,
2762 span: Span, // (potential) receiver for this impl
2764 -> TypeAndSubsts<'tcx> {
2765 let ity = self.tcx.lookup_item_type(did);
2766 debug!("impl_self_ty: ity={:?}", ity);
2768 let substs = self.fresh_substs_for_item(span, did);
2769 let substd_ty = self.instantiate_type_scheme(span, &substs, &ity.ty);
2771 TypeAndSubsts { substs: substs, ty: substd_ty }
2774 /// Unifies the return type with the expected type early, for more coercions
2775 /// and forward type information on the argument expressions.
///
/// Returns the resolved formal argument types after the unification, or an
/// empty vector if there was no expected return type or unification failed.
2776 fn expected_types_for_fn_args(&self,
2778 expected_ret: Expectation<'tcx>,
2779 formal_ret: Ty<'tcx>,
2780 formal_args: &[Ty<'tcx>])
2782 let expected_args = expected_ret.only_has_type(self).and_then(|ret_ty| {
// Region constraints are committed only if the subtyping succeeds.
2783 self.commit_regions_if_ok(|| {
2784 // Attempt to apply a subtyping relationship between the formal
2785 // return type (likely containing type variables if the function
2786 // is polymorphic) and the expected return type.
2787 // No argument expectations are produced if unification fails.
2788 let origin = TypeOrigin::Misc(call_span);
2789 let ures = self.sub_types(false, origin, formal_ret, ret_ty);
2790 // FIXME(#15760) can't use try! here, FromError doesn't default
2791 // to identity so the resulting type is not constrained.
2793 // FIXME(#32730) propagate obligations
2794 Ok(InferOk { obligations, .. }) => assert!(obligations.is_empty()),
2795 Err(e) => return Err(e),
2798 // Record all the argument types, with the substitutions
2799 // produced from the above subtyping unification.
2800 Ok(formal_args.iter().map(|ty| {
2801 self.resolve_type_vars_if_possible(ty)
2804 }).unwrap_or(vec![]);
2805 debug!("expected_types_for_fn_args(formal={:?} -> {:?}, expected={:?} -> {:?})",
2806 formal_args, formal_ret,
2807 expected_args, expected_ret);
2811 // Checks a method call.
//
// Resolves the method on the receiver (args[0]), records it in the method
// map, checks the remaining arguments, and writes the call's return type.
// On lookup failure it reports the error and writes the error type.
2812 fn check_method_call(&self,
2813 expr: &'gcx hir::Expr,
2814 method_name: Spanned<ast::Name>,
2815 args: &'gcx [P<hir::Expr>],
2817 expected: Expectation<'tcx>,
2818 lvalue_pref: LvaluePreference) {
2819 let rcvr = &args[0];
2820 self.check_expr_with_lvalue_pref(&rcvr, lvalue_pref);
2822 // no need to check for bot/err -- callee does that
2823 let expr_t = self.structurally_resolved_type(expr.span, self.expr_ty(&rcvr));
// Explicit type arguments supplied on the method call, e.g. `x.foo::<T>()`.
2825 let tps = tps.iter().map(|ast_ty| self.to_ty(&ast_ty)).collect::<Vec<_>>();
2826 let fn_ty = match self.lookup_method(method_name.span,
2833 let method_ty = method.ty;
2834 let method_call = MethodCall::expr(expr.id);
2835 self.tables.borrow_mut().method_map.insert(method_call, method);
// An Invalid name means parse recovery already errored; stay quiet.
2839 if method_name.node != keywords::Invalid.name() {
2840 self.report_method_error(method_name.span, expr_t,
2841 method_name.node, Some(rcvr), error);
2843 self.write_error(expr.id);
2848 // Call the generic checker.
2849 let ret_ty = self.check_method_argument_types(method_name.span, fn_ty,
2854 self.write_call(expr, ret_ty);
2857 // A generic function for checking the then and else in an if
//
// Checks the condition as `bool`, checks both branches, and unifies their
// types (LUB-coercion when an else branch exists, unit otherwise); writes
// the resulting type to node `id`. Some lines are elided in this excerpt.
2859 fn check_then_else(&self,
2860 cond_expr: &'gcx hir::Expr,
2861 then_blk: &'gcx hir::Block,
2862 opt_else_expr: Option<&'gcx hir::Expr>,
2865 expected: Expectation<'tcx>) {
2866 self.check_expr_has_type(cond_expr, self.tcx.types.bool);
2868 let expected = expected.adjust_for_branches(self);
2869 self.check_block_with_expected(then_blk, expected);
2870 let then_ty = self.node_ty(then_blk.id);
2872 let unit = self.tcx.mk_nil();
2873 let (origin, expected, found, result) =
2874 if let Some(else_expr) = opt_else_expr {
2875 self.check_expr_with_expectation(else_expr, expected);
2876 let else_ty = self.expr_ty(else_expr);
2877 let origin = TypeOrigin::IfExpression(sp);
2879 // Only try to coerce-unify if we have a then expression
2880 // to assign coercions to, otherwise it's () or diverging.
2881 let result = if let Some(ref then) = then_blk.expr {
2882 let res = self.try_find_coercion_lub(origin, || Some(&**then),
2883 then_ty, else_expr);
2885 // In case we did perform an adjustment, we have to update
2886 // the type of the block, because old trans still uses it.
2887 let adj = self.tables.borrow().adjustments.get(&then.id).cloned();
2888 if res.is_ok() && adj.is_some() {
2889 self.write_ty(then_blk.id, self.adjust_expr_ty(then, adj.as_ref()));
// No tail expression: fall back to a plain LUB inside a snapshot.
2894 self.commit_if_ok(|_| {
2895 let trace = TypeTrace::types(origin, true, then_ty, else_ty);
2896 self.lub(true, trace, &then_ty, &else_ty)
2897 .map(|InferOk { value, obligations }| {
2898 // FIXME(#32730) propagate obligations
2899 assert!(obligations.is_empty());
2904 (origin, then_ty, else_ty, result)
// No else branch: the `then` block must have unit type.
2906 let origin = TypeOrigin::IfExpressionWithNoElse(sp);
2907 (origin, unit, then_ty,
2908 self.eq_types(true, origin, unit, then_ty)
2909 .map(|InferOk { obligations, .. }| {
2910 // FIXME(#32730) propagate obligations
2911 assert!(obligations.is_empty());
2916 let if_ty = match result {
// Suppress the mismatch report if the condition already errored.
2918 if self.expr_ty(cond_expr).references_error() {
2925 self.report_mismatched_types(origin, expected, found, e);
2930 self.write_ty(id, if_ty);
2933 // Check field access expressions
//
// Autoderefs the base looking for a struct with the named field; on failure
// reports privacy errors, method-vs-field confusion, raw-pointer deref
// hints, or close-name suggestions. Some lines are elided in this excerpt.
2934 fn check_field(&self,
2935 expr: &'gcx hir::Expr,
2936 lvalue_pref: LvaluePreference,
2937 base: &'gcx hir::Expr,
2938 field: &Spanned<ast::Name>) {
2939 self.check_expr_with_lvalue_pref(base, lvalue_pref);
2940 let expr_t = self.structurally_resolved_type(expr.span,
2941 self.expr_ty(base));
// Remembered when a matching-but-private field is found, for a better error.
2942 let mut private_candidate = None;
2943 let mut autoderef = self.autoderef(expr.span, expr_t);
2944 while let Some((base_t, autoderefs)) = autoderef.next() {
2945 if let ty::TyStruct(base_def, substs) = base_t.sty {
2946 debug!("struct named {:?}", base_t);
2947 if let Some(field) = base_def.struct_variant().find_field_named(field.node) {
2948 let field_ty = self.field_ty(expr.span, field, substs);
2949 if field.vis.is_accessible_from(self.body_id, &self.tcx().map) {
2950 autoderef.finalize(lvalue_pref, Some(base));
2951 self.write_ty(expr.id, field_ty);
2952 self.write_autoderef_adjustment(base.id, autoderefs);
2955 private_candidate = Some((base_def.did, field_ty));
2959 autoderef.unambiguous_final_ty();
// Error reporting below: private field, invalid name, method of same name,
// or no such field at all.
2961 if let Some((did, field_ty)) = private_candidate {
2962 let struct_path = self.tcx().item_path_str(did);
2963 self.write_ty(expr.id, field_ty);
2964 let msg = format!("field `{}` of struct `{}` is private", field.node, struct_path);
2965 let mut err = self.tcx().sess.struct_span_err(expr.span, &msg);
2966 // Also check if an accessible method exists, which is often what is meant.
2967 if self.method_exists(field.span, field.node, expr_t, expr.id, false) {
2968 err.note(&format!("a method `{}` also exists, perhaps you wish to call it",
2972 } else if field.node == keywords::Invalid.name() {
2973 self.write_error(expr.id);
2974 } else if self.method_exists(field.span, field.node, expr_t, expr.id, true) {
2975 self.type_error_struct(field.span, |actual| {
2976 format!("attempted to take value of method `{}` on type \
2977 `{}`", field.node, actual)
2979 .help("maybe a `()` to call it is missing? \
2980 If not, try an anonymous function")
2982 self.write_error(expr.id);
2984 let mut err = self.type_error_struct(expr.span, |actual| {
2985 format!("attempted access of field `{}` on type `{}`, \
2986 but no field with that name was found",
2989 if let ty::TyRawPtr(..) = expr_t.sty {
2990 err.note(&format!("`{0}` is a native pointer; perhaps you need to deref with \
2991 `(*{0}).{1}`", pprust::expr_to_string(base), field.node));
2993 if let ty::TyStruct(def, _) = expr_t.sty {
2994 Self::suggest_field_names(&mut err, def.struct_variant(), field, vec![]);
2997 self.write_error(expr.id);
3001 // displays hints about the closest matches in field names
//
// `skip` holds field names to exclude from suggestions (e.g. ones the user
// already wrote); private non-local fields are also excluded.
3002 fn suggest_field_names(err: &mut DiagnosticBuilder,
3003 variant: ty::VariantDef<'tcx>,
3004 field: &Spanned<ast::Name>,
3005 skip : Vec<InternedString>) {
3006 let name = field.node.as_str();
3007 let names = variant.fields.iter().filter_map(|field| {
3008 // ignore already set fields and private fields from non-local crates
3009 if skip.iter().any(|x| *x == field.name.as_str()) ||
3010 (variant.did.krate != LOCAL_CRATE && field.vis != Visibility::Public) {
3017 // only find fits with at least one matching letter
3018 if let Some(name) = find_best_match_for_name(names, &name, Some(name.len())) {
3019 err.span_help(field.span,
3020 &format!("did you mean `{}`?", name));
3024 // Check tuple index expressions
//
// Handles `base.0`-style access on tuples and tuple structs, autoderef-ing
// the base; reports private-field, out-of-bounds, and non-tuple errors.
// Some lines are elided in this excerpt.
3025 fn check_tup_field(&self,
3026 expr: &'gcx hir::Expr,
3027 lvalue_pref: LvaluePreference,
3028 base: &'gcx hir::Expr,
3029 idx: codemap::Spanned<usize>) {
3030 self.check_expr_with_lvalue_pref(base, lvalue_pref);
3031 let expr_t = self.structurally_resolved_type(expr.span,
3032 self.expr_ty(base));
3033 let mut private_candidate = None;
// Distinguishes "index out of bounds" from "not a tuple at all" in errors.
3034 let mut tuple_like = false;
3035 let mut autoderef = self.autoderef(expr.span, expr_t);
3036 while let Some((base_t, autoderefs)) = autoderef.next() {
3037 let field = match base_t.sty {
3038 ty::TyStruct(base_def, substs) => {
3039 tuple_like = base_def.struct_variant().kind == ty::VariantKind::Tuple;
3040 if !tuple_like { continue }
3042 debug!("tuple struct named {:?}", base_t);
3043 base_def.struct_variant().fields.get(idx.node).and_then(|field| {
3044 let field_ty = self.field_ty(expr.span, field, substs);
3045 private_candidate = Some((base_def.did, field_ty));
3046 if field.vis.is_accessible_from(self.body_id, &self.tcx().map) {
3053 ty::TyTuple(ref v) => {
3055 v.get(idx.node).cloned()
3060 if let Some(field_ty) = field {
3061 autoderef.finalize(lvalue_pref, Some(base));
3062 self.write_ty(expr.id, field_ty);
3063 self.write_autoderef_adjustment(base.id, autoderefs);
3067 autoderef.unambiguous_final_ty();
3069 if let Some((did, field_ty)) = private_candidate {
3070 let struct_path = self.tcx().item_path_str(did);
3071 let msg = format!("field `{}` of struct `{}` is private", idx.node, struct_path);
3072 self.tcx().sess.span_err(expr.span, &msg);
// Still record the type so downstream checking can proceed.
3073 self.write_ty(expr.id, field_ty);
3077 self.type_error_message(
3081 format!("attempted out-of-bounds tuple index `{}` on \
3086 format!("attempted tuple index `{}` on type `{}`, but the \
3087 type was not a tuple or tuple struct",
3094 self.write_error(expr.id);
// Reports E0559/E0560 for a struct-literal field that does not exist on the
// target variant, suggesting close field names (excluding fields already
// written). Some parameter lines are elided in this excerpt.
3097 fn report_unknown_field(&self,
3099 variant: ty::VariantDef<'tcx>,
3101 skip_fields: &[hir::Field]) {
3102 let mut err = self.type_error_struct_with_diag(
// Enum variants and plain structs get different error codes/messages.
3104 |actual| if let ty::TyEnum(..) = ty.sty {
3105 struct_span_err!(self.tcx.sess, field.name.span, E0559,
3106 "struct variant `{}::{}` has no field named `{}`",
3107 actual, variant.name.as_str(), field.name.node)
3109 struct_span_err!(self.tcx.sess, field.name.span, E0560,
3110 "structure `{}` has no field named `{}`",
3111 actual, field.name.node)
3114 // prevent all specified fields from being suggested
3115 let skip_fields = skip_fields.iter().map(|ref x| x.name.node.as_str());
3116 Self::suggest_field_names(&mut err, variant, &field.name, skip_fields.collect());
// Type-checks the fields of a struct literal against `variant`: flags
// duplicate fields (E0062-family), unknown fields, and — when
// `check_completeness` is set (no `..base`) — missing fields (E0063).
// Some lines are elided in this excerpt.
3120 fn check_expr_struct_fields(&self,
3123 variant: ty::VariantDef<'tcx>,
3124 ast_fields: &'gcx [hir::Field],
3125 check_completeness: bool) {
3127 let substs = match adt_ty.sty {
3128 ty::TyStruct(_, substs) | ty::TyEnum(_, substs) => substs,
3129 _ => span_bug!(span, "non-ADT passed to check_expr_struct_fields")
// Fields not yet written by the literal; drained as we see them.
3132 let mut remaining_fields = FnvHashMap();
3133 for field in &variant.fields {
3134 remaining_fields.insert(field.name, field);
// Maps field name -> span of its first use, for duplicate diagnostics.
3137 let mut seen_fields = FnvHashMap();
3139 let mut error_happened = false;
3141 // Typecheck each field.
3142 for field in ast_fields {
3143 let expected_field_type;
3145 if let Some(v_field) = remaining_fields.remove(&field.name.node) {
3146 expected_field_type = self.field_ty(field.span, v_field, substs);
3148 seen_fields.insert(field.name.node, field.span);
3150 error_happened = true;
3151 expected_field_type = tcx.types.err;
// Name exists on the variant but was already consumed: a duplicate.
3152 if let Some(_) = variant.find_field_named(field.name.node) {
3153 let mut err = struct_span_err!(self.tcx.sess,
3156 "field `{}` specified more than once",
3159 err.span_label(field.name.span, &format!("used more than once"));
3161 if let Some(prev_span) = seen_fields.get(&field.name.node) {
3162 err.span_label(*prev_span, &format!("first use of `{}`", field.name.node));
3167 self.report_unknown_field(adt_ty, variant, field, ast_fields);
3171 // Make sure to give a type to the field even if there's
3172 // an error, so we can continue typechecking
3173 self.check_expr_coercable_to_type(&field.expr, expected_field_type);
3176 // Make sure the programmer specified all the fields.
3177 if check_completeness &&
3179 !remaining_fields.is_empty()
3181 span_err!(tcx.sess, span, E0063,
3182 "missing field{} {} in initializer of `{}`",
3183 if remaining_fields.len() == 1 {""} else {"s"},
3184 remaining_fields.keys()
3185 .map(|n| format!("`{}`", n))
3186 .collect::<Vec<_>>()
// Error-recovery path for a struct literal whose path failed to resolve:
// writes the error type and still checks every field/base expression so
// later passes have types for them. The `id` parameter line is elided here.
3193 fn check_struct_fields_on_error(&self,
3195 fields: &'gcx [hir::Field],
3196 base_expr: &'gcx Option<P<hir::Expr>>) {
3197 // Make sure to still write the types
3198 // otherwise we might ICE
3199 self.write_error(id);
3200 for field in fields {
3201 self.check_expr(&field.expr);
3204 Some(ref base) => self.check_expr(&base),
// Resolves a path used in struct-literal/pattern position to a variant
// definition and its instantiated type; emits E0071 when the path does not
// name a struct or struct variant, and feature-gates tuple variants in
// struct patterns. Some lines are elided in this excerpt.
3209 pub fn check_struct_path(&self,
3211 node_id: ast::NodeId,
3213 -> Option<(ty::VariantDef<'tcx>, Ty<'tcx>)> {
3214 let def = self.finish_resolving_struct_path(path, node_id, span);
3215 let variant = match def {
// Resolution already errored; taint and bail quietly.
3217 self.set_tainted_by_errors();
3220 Def::Variant(type_did, _) | Def::Struct(type_did) => {
3221 Some((type_did, self.tcx.expect_variant_def(def)))
// Type aliases are accepted only when they resolve to a struct type.
3223 Def::TyAlias(did) => {
3224 if let Some(&ty::TyStruct(adt, _)) = self.tcx.opt_lookup_item_type(did)
3225 .map(|scheme| &scheme.ty.sty) {
3226 Some((did, adt.struct_variant()))
3234 if let Some((def_id, variant)) = variant {
3235 if variant.kind == ty::VariantKind::Tuple &&
3236 !self.tcx.sess.features.borrow().relaxed_adts {
3237 emit_feature_err(&self.tcx.sess.parse_sess.span_diagnostic,
3238 "relaxed_adts", span, GateIssue::Language,
3239 "tuple structs and variants in struct patterns are unstable");
3241 let ty = self.instantiate_type_path(def_id, path, node_id);
3244 struct_span_err!(self.tcx.sess, path.span, E0071,
3245 "`{}` does not name a struct or a struct variant",
3246 pprust::path_to_string(path))
3247 .span_label(path.span, &format!("not a struct"))
// Type-check a struct literal expression `S { f: e, .. , ..base }`:
// resolve the path to a variant, check the named fields against it, and,
// when a functional-record-update base is present, check the base against
// the struct type and record the per-field types in `fru_field_types`
// (borrowck/trans use those later). E0436 fires when `..base` is used on
// a non-struct. Elided here: parts of the signature, the error branch of
// the path check, the match head on the struct type, and closing braces.
3253 fn check_expr_struct(&self,
3256 fields: &'gcx [hir::Field],
3257 base_expr: &'gcx Option<P<hir::Expr>>)
3259 // Find the relevant variant
3260 let (variant, expr_ty) = if let Some(variant_ty) = self.check_struct_path(path, expr.id,
// Path failed to resolve to a variant: fall back to error recovery so
// every field still gets a recorded type.
3264 self.check_struct_fields_on_error(expr.id, fields, base_expr);
// `base_expr.is_none()` == "require all fields present" (no `..base`).
3268 self.check_expr_struct_fields(expr_ty, path.span, variant, fields,
3269 base_expr.is_none());
3270 if let &Some(ref base_expr) = base_expr {
// The FRU base must have exactly the literal's type.
3271 self.check_expr_has_type(base_expr, expr_ty);
3273 ty::TyStruct(adt, substs) => {
// Record the (normalized) type of every field of the struct so later
// phases know the types the omitted fields are copied at.
3274 self.tables.borrow_mut().fru_field_types.insert(
3276 adt.struct_variant().fields.iter().map(|f| {
3277 self.normalize_associated_types_in(
3278 expr.span, &f.ty(self.tcx, substs)
// `..base` on anything that is not a plain struct is an error.
3284 span_err!(self.tcx.sess, base_expr.span, E0436,
3285 "functional record update syntax requires a struct");
3293 /// If an expression has any sub-expressions that result in a type error,
3294 /// inspecting that expression's type with `ty.references_error()` will return
3295 /// true. Likewise, if an expression is known to diverge, inspecting its
3296 /// type with `ty::type_is_bot` will return true (n.b.: since Rust is
3297 /// strict, _|_ can appear in the type of an expression that does not,
3298 /// itself, diverge: for example, fn() -> _|_.)
3299 /// Note that inspecting a type's structure *directly* may expose the fact
3300 /// that there are actually multiple representations for `TyError`, so avoid
3301 /// that when err needs to be handled differently.
// The heart of expression type checking: one match arm per `hir::Expr`
// kind. Each arm computes (or delegates computation of) the expression's
// type and records it via `write_ty`/`write_error`/`write_never`/
// `write_nil` under the expression's node id. `expected` is a hint only —
// it may guide inference but arms are responsible for their own demands.
// `lvalue_pref` propagates mutability preference into lvalue-producing
// sub-expressions (deref, index, field).
// NOTE(review): this excerpt elides many lines (the `id`/`tcx` bindings,
// several match-arm heads and closing braces, some error arms); comments
// below describe only what the visible lines establish.
3302 fn check_expr_with_expectation_and_lvalue_pref(&self,
3303 expr: &'gcx hir::Expr,
3304 expected: Expectation<'tcx>,
3305 lvalue_pref: LvaluePreference) {
3306 debug!(">> typechecking: expr={:?} expected={:?}",
// `box e`: push the expectation's pointee type into the sub-expression.
3312 hir::ExprBox(ref subexpr) => {
3313 let expected_inner = expected.to_option(self).map_or(NoExpectation, |ty| {
3315 ty::TyBox(ty) => Expectation::rvalue_hint(self, ty),
3319 self.check_expr_with_expectation(subexpr, expected_inner);
3320 let referent_ty = self.expr_ty(&subexpr);
3321 self.write_ty(id, tcx.mk_box(referent_ty));
// Literals: `check_lit` uses the expectation for integer/float defaults.
3324 hir::ExprLit(ref lit) => {
3325 let typ = self.check_lit(&lit, expected);
3326 self.write_ty(id, typ);
3328 hir::ExprBinary(op, ref lhs, ref rhs) => {
3329 self.check_binop(expr, op, lhs, rhs);
3331 hir::ExprAssignOp(op, ref lhs, ref rhs) => {
3332 self.check_binop_assign(expr, op, lhs, rhs);
// Unary ops: `!`/`-` may be builtin or overloaded (Not/Neg lang items);
// `*` may be builtin deref or an overloaded Deref method call.
3334 hir::ExprUnary(unop, ref oprnd) => {
3335 let expected_inner = match unop {
3336 hir::UnNot | hir::UnNeg => {
// Only `*` (deref) forwards the lvalue preference into the operand.
3343 let lvalue_pref = match unop {
3344 hir::UnDeref => lvalue_pref,
3347 self.check_expr_with_expectation_and_lvalue_pref(&oprnd,
3350 let mut oprnd_t = self.expr_ty(&oprnd);
3352 if !oprnd_t.references_error() {
3355 oprnd_t = self.structurally_resolved_type(expr.span, oprnd_t);
// Builtin deref first; otherwise try an overloaded `Deref` impl and
// record the resolved method in the method map.
3357 if let Some(mt) = oprnd_t.builtin_deref(true, NoPreference) {
3359 } else if let Some(method) = self.try_overloaded_deref(
3360 expr.span, Some(&oprnd), oprnd_t, lvalue_pref) {
3361 oprnd_t = self.make_overloaded_lvalue_return_type(method).ty;
3362 self.tables.borrow_mut().method_map.insert(MethodCall::expr(expr.id),
3365 self.type_error_message(expr.span, |actual| {
3366 format!("type `{}` cannot be \
3367 dereferenced", actual)
3369 oprnd_t = tcx.types.err;
3373 oprnd_t = self.structurally_resolved_type(oprnd.span,
// `!` is builtin on integers and bool; otherwise dispatch to `Not`.
3375 if !(oprnd_t.is_integral() || oprnd_t.sty == ty::TyBool) {
3376 oprnd_t = self.check_user_unop("!", "not",
3377 tcx.lang_items.not_trait(),
3378 expr, &oprnd, oprnd_t, unop);
3382 oprnd_t = self.structurally_resolved_type(oprnd.span,
// `-` is builtin on integers and floats; otherwise dispatch to `Neg`.
3384 if !(oprnd_t.is_integral() || oprnd_t.is_fp()) {
3385 oprnd_t = self.check_user_unop("-", "neg",
3386 tcx.lang_items.neg_trait(),
3387 expr, &oprnd, oprnd_t, unop);
3392 self.write_ty(id, oprnd_t);
// `&e` / `&mut e`: derive a hint for the operand from the expected
// reference/pointer type, then build `&'r T` with a fresh region var.
3394 hir::ExprAddrOf(mutbl, ref oprnd) => {
3395 let hint = expected.only_has_type(self).map_or(NoExpectation, |ty| {
3397 ty::TyRef(_, ref mt) | ty::TyRawPtr(ref mt) => {
3398 if self.tcx.expr_is_lval(&oprnd) {
3399 // Lvalues may legitimately have unsized types.
3400 // For example, dereferences of a fat pointer and
3401 // the last field of a struct can be unsized.
3402 ExpectHasType(mt.ty)
3404 Expectation::rvalue_hint(self, mt.ty)
3410 let lvalue_pref = LvaluePreference::from_mutbl(mutbl);
3411 self.check_expr_with_expectation_and_lvalue_pref(&oprnd, hint, lvalue_pref);
3413 let tm = ty::TypeAndMut { ty: self.expr_ty(&oprnd), mutbl: mutbl };
3414 let oprnd_t = if tm.ty.references_error() {
3417 // Note: at this point, we cannot say what the best lifetime
3418 // is to use for resulting pointer. We want to use the
3419 // shortest lifetime possible so as to avoid spurious borrowck
3420 // errors. Moreover, the longest lifetime will depend on the
3421 // precise details of the value whose address is being taken
3422 // (and how long it is valid), which we don't know yet until type
3423 // inference is complete.
3425 // Therefore, here we simply generate a region variable. The
3426 // region inferencer will then select the ultimate value.
3427 // Finally, borrowck is charged with guaranteeing that the
3428 // value whose address was taken can actually be made to live
3429 // as long as it needs to live.
3430 let region = self.next_region_var(infer::AddrOfRegion(expr.span));
3431 tcx.mk_ref(region, tm)
3433 self.write_ty(id, oprnd_t);
// Paths: resolve (possibly UFCS-qualified), instantiate the value path,
// and register WF obligations for any user-supplied type arguments.
3435 hir::ExprPath(ref opt_qself, ref path) => {
3436 let opt_self_ty = opt_qself.as_ref().map(|qself| self.to_ty(&qself.ty));
3437 let (def, opt_ty, segments) = self.resolve_ty_and_def_ufcs(opt_self_ty, path,
3438 expr.id, expr.span);
3439 if def != Def::Err {
3440 self.instantiate_value_path(segments, opt_ty, def, expr.span, id);
3442 self.set_tainted_by_errors();
3443 self.write_error(id);
3446 // We always require that the type provided as the value for
3447 // a type parameter outlives the moment of instantiation.
3448 self.opt_node_ty_substs(expr.id, |item_substs| {
3449 self.add_wf_bounds(&item_substs.substs, expr);
// Inline asm: operands are checked but impose no type on the asm expr.
3452 hir::ExprInlineAsm(_, ref outputs, ref inputs) => {
3453 for output in outputs {
3454 self.check_expr(output);
3456 for input in inputs {
3457 self.check_expr(input);
// break/continue/return diverge: their own type is `!` (never).
3461 hir::ExprBreak(_) => { self.write_never(id); }
3462 hir::ExprAgain(_) => { self.write_never(id); }
3463 hir::ExprRet(ref expr_opt) => {
3464 if let Some(ref e) = *expr_opt {
3465 self.check_expr_coercable_to_type(&e, self.ret_ty);
// Bare `return;` — only legal if the declared return type equals `()`.
3467 let eq_result = self.eq_types(false,
3468 TypeOrigin::Misc(expr.span),
3471 // FIXME(#32730) propagate obligations
3472 .map(|InferOk { obligations, .. }| assert!(obligations.is_empty()));
3473 if eq_result.is_err() {
3474 struct_span_err!(tcx.sess, expr.span, E0069,
3475 "`return;` in a function whose return type is not `()`")
3476 .span_label(expr.span, &format!("return type is not ()"))
3480 self.write_never(id);
// Assignment: lhs checked with PreferMutLvalue, rhs coerced to lhs type;
// non-lvalue lhs is E0070. Lhs must also be Sized.
3482 hir::ExprAssign(ref lhs, ref rhs) => {
3483 self.check_expr_with_lvalue_pref(&lhs, PreferMutLvalue);
3486 if !tcx.expr_is_lval(&lhs) {
3488 tcx.sess, expr.span, E0070,
3489 "invalid left-hand side expression")
3492 &format!("left-hand of expression not valid"))
3496 let lhs_ty = self.expr_ty(&lhs);
3497 self.check_expr_coercable_to_type(&rhs, lhs_ty);
3498 let rhs_ty = self.expr_ty(&rhs);
3500 self.require_expr_have_sized_type(&lhs, traits::AssignmentLhsSized);
3502 if lhs_ty.references_error() || rhs_ty.references_error() {
3503 self.write_error(id);
3508 hir::ExprIf(ref cond, ref then_blk, ref opt_else_expr) => {
3509 self.check_then_else(&cond, &then_blk, opt_else_expr.as_ref().map(|e| &**e),
3510 id, expr.span, expected);
// `while`: condition is bool, body must have no value.
3512 hir::ExprWhile(ref cond, ref body, _) => {
3513 self.check_expr_has_type(&cond, tcx.types.bool);
3514 self.check_block_no_value(&body);
3515 let cond_ty = self.expr_ty(&cond);
3516 let body_ty = self.node_ty(body.id);
3517 if cond_ty.references_error() || body_ty.references_error() {
3518 self.write_error(id);
// `loop` without a reachable `break` diverges (`!`).
3524 hir::ExprLoop(ref body, _) => {
3525 self.check_block_no_value(&body);
3526 if !may_break(tcx, expr.id, &body) {
3527 self.write_never(id);
3532 hir::ExprMatch(ref discrim, ref arms, match_src) => {
3533 self.check_match(expr, &discrim, arms, expected, match_src);
3535 hir::ExprClosure(capture, ref decl, ref body, _) => {
3536 self.check_expr_closure(expr, capture, &decl, &body, expected);
3538 hir::ExprBlock(ref b) => {
3539 self.check_block_with_expected(&b, expected);
3540 self.write_ty(id, self.node_ty(b.id));
// Calls: also require the callee's return type to be well-formed.
3542 hir::ExprCall(ref callee, ref args) => {
3543 self.check_call(expr, &callee, &args[..], expected);
3545 // we must check that return type of called functions is WF:
3546 let ret_ty = self.expr_ty(expr);
3547 self.register_wf_obligation(ret_ty, expr.span, traits::MiscObligation);
3549 hir::ExprMethodCall(name, ref tps, ref args) => {
3550 self.check_method_call(expr, name, &args[..], &tps[..], expected, lvalue_pref);
// If any argument type is an error, poison the whole call expression.
3551 let arg_tys = args.iter().map(|a| self.expr_ty(&a));
3552 let args_err = arg_tys.fold(false, |rest_err, a| rest_err || a.references_error());
3554 self.write_error(id);
// `e as T`: check `e` with a castable-to hint; the real cast legality
// check is deferred until all types are known (deferred_cast_checks).
3557 hir::ExprCast(ref e, ref t) => {
3558 if let hir::TyFixedLengthVec(_, ref count_expr) = t.node {
3559 self.check_expr_with_hint(&count_expr, tcx.types.usize);
3562 // Find the type of `e`. Supply hints based on the type we are casting to,
3564 let t_cast = self.to_ty(t);
3565 let t_cast = self.resolve_type_vars_if_possible(&t_cast);
3566 self.check_expr_with_expectation(e, ExpectCastableToType(t_cast));
3567 let t_expr = self.expr_ty(e);
3568 let t_cast = self.resolve_type_vars_if_possible(&t_cast);
3570 // Eagerly check for some obvious errors.
3571 if t_expr.references_error() || t_cast.references_error() {
3572 self.write_error(id);
3574 // Write a type for the whole expression, assuming everything is going
3576 self.write_ty(id, t_cast);
3578 // Defer other checks until we're done type checking.
3579 let mut deferred_cast_checks = self.deferred_cast_checks.borrow_mut();
3580 match cast::CastCheck::new(self, e, t_expr, t_cast, t.span, expr.span) {
3582 deferred_cast_checks.push(cast_check);
3584 Err(ErrorReported) => {
3585 self.write_error(id);
// Type ascription `e: T`.
3590 hir::ExprType(ref e, ref t) => {
3591 let typ = self.to_ty(&t);
3592 self.check_expr_eq_type(&e, typ);
3593 self.write_ty(id, typ);
// Array literal `[a, b, c]`: unify element types via coercion LUB,
// seeded either by the expected element type or a fresh type variable.
3595 hir::ExprVec(ref args) => {
3596 let uty = expected.to_option(self).and_then(|uty| {
3598 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3603 let mut unified = self.next_ty_var();
3604 let coerce_to = uty.unwrap_or(unified);
3606 for (i, e) in args.iter().enumerate() {
3607 self.check_expr_with_hint(e, coerce_to);
3608 let e_ty = self.expr_ty(e);
3609 let origin = TypeOrigin::Misc(e.span);
3611 // Special-case the first element, as it has no "previous expressions".
3612 let result = if i == 0 {
3613 self.try_coerce(e, coerce_to)
3615 let prev_elems = || args[..i].iter().map(|e| &**e);
3616 self.try_find_coercion_lub(origin, prev_elems, unified, e)
3620 Ok(ty) => unified = ty,
3622 self.report_mismatched_types(origin, unified, e_ty, e);
3626 self.write_ty(id, tcx.mk_array(unified, args.len()));
// Repeat literal `[elem; n]`: count must be a const usize; for n > 1
// the element must be Copy (RepeatVec obligation).
3628 hir::ExprRepeat(ref element, ref count_expr) => {
3629 self.check_expr_has_type(&count_expr, tcx.types.usize);
3630 let count = eval_length(self.tcx.global_tcx(), &count_expr, "repeat count")
3633 let uty = match expected {
3634 ExpectHasType(uty) => {
3636 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3643 let (element_ty, t) = match uty {
3645 self.check_expr_coercable_to_type(&element, uty);
3649 let t: Ty = self.next_ty_var();
3650 self.check_expr_has_type(&element, t);
3651 (self.expr_ty(&element), t)
3656 // For [foo, ..n] where n > 1, `foo` must have
3658 self.require_type_meets(t, expr.span, traits::RepeatVec, ty::BoundCopy);
3661 if element_ty.references_error() {
3662 self.write_error(id);
3664 let t = tcx.mk_array(t, count);
3665 self.write_ty(id, t);
// Tuple literal: check each element against the expected tuple's
// corresponding field type when available.
3668 hir::ExprTup(ref elts) => {
3669 let flds = expected.only_has_type(self).and_then(|ty| {
3671 ty::TyTuple(ref flds) => Some(&flds[..]),
3675 let mut err_field = false;
3677 let elt_ts = elts.iter().enumerate().map(|(i, e)| {
3678 let t = match flds {
3679 Some(ref fs) if i < fs.len() => {
3681 self.check_expr_coercable_to_type(&e, ety);
3685 self.check_expr_with_expectation(&e, NoExpectation);
3689 err_field = err_field || t.references_error();
3693 self.write_error(id);
3695 let typ = tcx.mk_tup(elt_ts);
3696 self.write_ty(id, typ);
3699 hir::ExprStruct(ref path, ref fields, ref base_expr) => {
3700 self.check_expr_struct(expr, path, fields, base_expr);
3702 self.require_expr_have_sized_type(expr, traits::StructInitializerSized);
3704 hir::ExprField(ref base, ref field) => {
3705 self.check_field(expr, lvalue_pref, &base, field);
3707 hir::ExprTupField(ref base, idx) => {
3708 self.check_tup_field(expr, lvalue_pref, &base, idx);
// Indexing `base[idx]`: builtin or overloaded via lookup_indexing;
// on failure, special-cases tuples to suggest `tuple.N` syntax.
3710 hir::ExprIndex(ref base, ref idx) => {
3711 self.check_expr_with_lvalue_pref(&base, lvalue_pref);
3712 self.check_expr(&idx);
3714 let base_t = self.expr_ty(&base);
3715 let idx_t = self.expr_ty(&idx);
3717 if base_t.references_error() {
3718 self.write_ty(id, base_t);
3719 } else if idx_t.references_error() {
3720 self.write_ty(id, idx_t);
3722 let base_t = self.structurally_resolved_type(expr.span, base_t);
3723 match self.lookup_indexing(expr, base, base_t, idx_t, lvalue_pref) {
3724 Some((index_ty, element_ty)) => {
3725 let idx_expr_ty = self.expr_ty(idx);
3726 self.demand_eqtype(expr.span, index_ty, idx_expr_ty);
3727 self.write_ty(id, element_ty);
3730 self.check_expr_has_type(&idx, self.tcx.types.err);
3731 let mut err = self.type_error_struct(
3734 format!("cannot index a value of type `{}`",
3738 // Try to give some advice about indexing tuples.
3739 if let ty::TyTuple(_) = base_t.sty {
3740 let mut needs_note = true;
3741 // If the index is an integer, we can show the actual
3742 // fixed expression:
3743 if let hir::ExprLit(ref lit) = idx.node {
3744 if let ast::LitKind::Int(i,
3745 ast::LitIntType::Unsuffixed) = lit.node {
3746 let snip = tcx.sess.codemap().span_to_snippet(base.span);
3747 if let Ok(snip) = snip {
3748 err.span_suggestion(expr.span,
3749 "to access tuple elements, \
3750 use tuple indexing syntax \
3752 format!("{}.{}", snip, i));
3758 err.help("to access tuple elements, use tuple indexing \
3759 syntax (e.g. `tuple.0`)");
3763 self.write_ty(id, self.tcx().types.err);
3770 debug!("type of expr({}) {} is...", expr.id,
3771 pprust::expr_to_string(expr));
3772 debug!("... {:?}, expected is {:?}",
3777 // Finish resolving a path in a struct expression or pattern `S::A { .. }` if necessary.
3778 // The newly resolved definition is written into `def_map`.
// Finish a partially resolved struct path (`S::A { .. }` style): if the
// name resolver left `depth > 0` unresolved trailing segments, resolve
// them here via AstConv and write the final resolution back into
// `def_map`. Elided in this excerpt: a `path`/`span` parameter line, the
// early-return value for the depth == 0 case, two AstConv arguments, and
// the tail returning `def`.
3779 pub fn finish_resolving_struct_path(&self,
3781 node_id: ast::NodeId,
3785 let path_res = self.tcx().expect_resolution(node_id);
3786 if path_res.depth == 0 {
3787 // If fully resolved already, we don't have to do anything.
// Split the segments into the already-resolved base type prefix and the
// unresolved suffix, then let AstConv resolve the suffix against it.
3790 let base_ty_end = path.segments.len() - path_res.depth;
3791 let (_ty, def) = AstConv::finish_resolving_def_to_ty(self, self, span,
3792 PathParamMode::Optional,
3796 &path.segments[..base_ty_end],
3797 &path.segments[base_ty_end..]);
3798 // Write back the new resolution.
3799 self.tcx().def_map.borrow_mut().insert(node_id, PathResolution::new(def));
3804 // Resolve associated value path into a base type and associated constant or method definition.
3805 // The newly resolved definition is written into `def_map`.
// Resolve an associated-value path (`Ty::item`, `<T>::item`, `T::item`)
// into (definition, optional self type, the segments to instantiate).
// Fully resolved paths pass straight through; otherwise everything but
// the last segment is resolved as a type and the final segment is looked
// up as an associated const/method on it (UFCS). The new resolution is
// written back into `def_map`. Elided here: some AstConv arguments, the
// Ok arm head of the `resolve_ufcs` match, error fall-through, braces.
3806 pub fn resolve_ty_and_def_ufcs<'b>(&self,
3807 opt_self_ty: Option<Ty<'tcx>>,
3808 path: &'b hir::Path,
3809 node_id: ast::NodeId,
3811 -> (Def, Option<Ty<'tcx>>, &'b [hir::PathSegment])
3813 let path_res = self.tcx().expect_resolution(node_id);
3814 if path_res.depth == 0 {
3815 // If fully resolved already, we don't have to do anything.
3816 (path_res.base_def, opt_self_ty, &path.segments)
3818 // Try to resolve everything except for the last segment as a type.
3819 let ty_segments = path.segments.split_last().unwrap().1;
3820 let base_ty_end = path.segments.len() - path_res.depth;
3821 let (ty, _def) = AstConv::finish_resolving_def_to_ty(self, self, span,
3822 PathParamMode::Optional,
3826 &ty_segments[..base_ty_end],
3827 &ty_segments[base_ty_end..]);
3829 // Resolve an associated constant or method on the previously resolved type.
3830 let item_segment = path.segments.last().unwrap();
3831 let item_name = item_segment.name;
3832 let def = match self.resolve_ufcs(span, item_name, ty, node_id) {
// Method resolution failed: private matches still yield a def (the
// privacy error is reported elsewhere); other errors are reported here
// unless the name is the parser's error placeholder.
3835 let def = match error {
3836 method::MethodError::PrivateMatch(def) => def,
3839 if item_name != keywords::Invalid.name() {
3840 self.report_method_error(span, ty, item_name, None, error);
3846 // Write back the new resolution.
3847 self.tcx().def_map.borrow_mut().insert(node_id, PathResolution::new(def));
3848 (def, Some(ty), slice::ref_slice(item_segment))
// Type-check a `let` initializer against the local's declared/inferred
// type. Two regimes: with a `ref`/`ref mut` binding anywhere in the
// pattern the initializer type must EQUAL the local type (no coercion,
// for soundness — see #23116); otherwise ordinary coercion applies.
3852 pub fn check_decl_initializer(&self,
3853 local: &'gcx hir::Local,
3854 init: &'gcx hir::Expr)
// `Some(mutbl)` if the pattern contains a `ref`/`ref mut` binding.
3856 let ref_bindings = self.tcx.pat_contains_ref_binding(&local.pat);
3858 let local_ty = self.local_ty(init.span, local.id);
3859 if let Some(m) = ref_bindings {
3860 // Somewhat subtle: if we have a `ref` binding in the pattern,
3861 // we want to avoid introducing coercions for the RHS. This is
3862 // both because it helps preserve sanity and, in the case of
3863 // ref mut, for soundness (issue #23116). In particular, in
3864 // the latter case, we need to be clear that the type of the
3865 // referent for the reference that results is *equal to* the
3866 // type of the lvalue it is referencing, and not some
3867 // supertype thereof.
3868 self.check_expr_with_lvalue_pref(init, LvaluePreference::from_mutbl(m));
3869 let init_ty = self.expr_ty(init);
3870 self.demand_eqtype(init.span, init_ty, local_ty);
// No ref bindings: the initializer may coerce into the local's type.
3872 self.check_expr_coercable_to_type(init, local_ty)
// Type-check a full `let` declaration: record the local's type, check
// the initializer (if any), then check the pattern against that type.
// If either the initializer or the pattern produced an error type, the
// local's recorded type is overwritten with it so the error propagates.
3876 pub fn check_decl_local(&self, local: &'gcx hir::Local) {
3877 let t = self.local_ty(local.span, local.id);
3878 self.write_ty(local.id, t);
3880 if let Some(ref init) = local.init {
3881 self.check_decl_initializer(local, &init);
3882 let init_ty = self.expr_ty(&init);
3883 if init_ty.references_error() {
3884 self.write_ty(local.id, init_ty);
// The pattern itself is checked against the local type, regardless of
// whether an initializer was present.
3888 self.check_pat(&local.pat, t);
3889 let pat_ty = self.node_ty(local.pat.id);
3890 if pat_ty.references_error() {
3891 self.write_ty(local.id, pat_ty);
// Type-check one statement. The statement node's own type is: a
// diverging type variable if any constituent diverges (`saw_bot`), the
// error type if any constituent had a type error (`saw_err`), and `()`
// otherwise. NOTE(review): the binding of `node_id` from the match arms
// and several braces are elided in this excerpt.
3895 pub fn check_stmt(&self, stmt: &'gcx hir::Stmt) {
3897 let mut saw_bot = false;
3898 let mut saw_err = false;
3900 hir::StmtDecl(ref decl, id) => {
3903 hir::DeclLocal(ref l) => {
3904 self.check_decl_local(&l);
3905 let l_t = self.node_ty(l.id);
3906 saw_bot = saw_bot || self.type_var_diverges(l_t);
3907 saw_err = saw_err || l_t.references_error();
// Nested items are checked as items, not as part of this body.
3909 hir::DeclItem(_) => {/* ignore for now */ }
// Expression statement (no trailing semicolon): must have type `()`.
3912 hir::StmtExpr(ref expr, id) => {
3914 // Check with expected type of ()
3915 self.check_expr_has_type(&expr, self.tcx.mk_nil());
3916 let expr_ty = self.expr_ty(&expr);
3917 saw_bot = saw_bot || self.type_var_diverges(expr_ty);
3918 saw_err = saw_err || expr_ty.references_error();
// Semicolon statement: any type is allowed, the value is dropped.
3920 hir::StmtSemi(ref expr, id) => {
3922 self.check_expr(&expr);
3923 let expr_ty = self.expr_ty(&expr);
3924 saw_bot |= self.type_var_diverges(expr_ty);
3925 saw_err |= expr_ty.references_error();
3929 self.write_ty(node_id, self.next_diverging_ty_var());
3932 self.write_error(node_id);
3935 self.write_nil(node_id)
// Type-check a block in statement position (loop/while bodies): the
// block is expected to produce `()`. On success, demand `() <: blkty`;
// on error, record the error type for the block node.
3939 pub fn check_block_no_value(&self, blk: &'gcx hir::Block) {
3940 self.check_block_with_expected(blk, ExpectHasType(self.tcx.mk_nil()));
3941 let blkty = self.node_ty(blk.id);
3942 if blkty.references_error() {
3943 self.write_error(blk.id);
3945 let nilty = self.tcx.mk_nil();
3946 self.demand_suptype(blk.span, nilty, blkty);
// Type-check a block against an expectation: check each statement in
// order, tracking divergence (for unreachable-code lints) and errors,
// then handle the optional tail expression. The block's type is the
// tail expression's type, a diverging variable if everything before it
// diverged, or `()`/error as appropriate. Unsafety state (`self.ps`) is
// pushed for the block and restored at the end. NOTE(review): the match
// on `blk.expr` (`Some(ref e)` arm head), lint node ids/spans, and some
// braces are elided in this excerpt.
3950 fn check_block_with_expected(&self,
3951 blk: &'gcx hir::Block,
3952 expected: Expectation<'tcx>) {
// Enter the block's unsafety context, remembering the previous state.
3954 let mut fcx_ps = self.ps.borrow_mut();
3955 let unsafety_state = fcx_ps.recurse(blk);
3956 replace(&mut *fcx_ps, unsafety_state)
3959 let mut warned = false;
3960 let mut any_diverges = false;
3961 let mut any_err = false;
3962 for s in &blk.stmts {
3964 let s_id = s.node.id();
3965 let s_ty = self.node_ty(s_id);
// Lint only the FIRST unreachable statement after divergence
// (`warned` suppresses repeats); item declarations are exempt.
3966 if any_diverges && !warned && match s.node {
3967 hir::StmtDecl(ref decl, _) => {
3969 hir::DeclLocal(_) => true,
3973 hir::StmtExpr(_, _) | hir::StmtSemi(_, _) => true,
3977 .add_lint(lint::builtin::UNREACHABLE_CODE,
3980 "unreachable statement".to_string());
3983 // FIXME(canndrew): This is_never should probably be an is_uninhabited
3984 any_diverges = any_diverges ||
3985 self.type_var_diverges(s_ty) ||
3987 any_err = any_err || s_ty.references_error();
// No tail expression: block is `()`, error, or diverging.
3990 None => if any_err {
3991 self.write_error(blk.id);
3992 } else if any_diverges {
3993 self.write_ty(blk.id, self.next_diverging_ty_var());
3995 self.write_nil(blk.id);
// Tail expression present (arm head elided): warn if unreachable,
// then check it against the block's expectation.
3998 if any_diverges && !warned {
4001 .add_lint(lint::builtin::UNREACHABLE_CODE,
4004 "unreachable expression".to_string());
4006 let ety = match expected {
// With a concrete expected type, the tail may coerce into it.
4007 ExpectHasType(ety) => {
4008 self.check_expr_coercable_to_type(&e, ety);
4012 self.check_expr_with_expectation(&e, expected);
4018 self.write_error(blk.id);
4019 } else if any_diverges {
4020 self.write_ty(blk.id, self.next_diverging_ty_var());
4022 self.write_ty(blk.id, ety);
// Restore the enclosing unsafety state.
4027 *self.ps.borrow_mut() = prev;
4030 // Instantiates the given path, which must refer to an item with the given
4031 // number of type parameters and type.
// Instantiate a path referring to a VALUE item (fn, const, static,
// method, associated const, variant/struct constructor, local): classify
// which path segments may carry generic parameters, build the `Substs`
// from user-provided arguments plus inference variables/defaults,
// register the item's where-clause obligations, and record the
// substituted type for `node_id`. Locals/upvars short-circuit with empty
// substs. NOTE(review): this excerpt elides the `span`/`def` parameters,
// several match-arm heads, and various braces; comments reflect only the
// visible lines.
4032 pub fn instantiate_value_path(&self,
4033 segments: &[hir::PathSegment],
4034 opt_self_ty: Option<Ty<'tcx>>,
4037 node_id: ast::NodeId)
4039 debug!("instantiate_value_path(path={:?}, def={:?}, node_id={})",
4044 // We need to extract the type parameters supplied by the user in
4045 // the path `path`. Due to the current setup, this is a bit of a
4046 // tricky-process; the problem is that resolve only tells us the
4047 // end-point of the path resolution, and not the intermediate steps.
4048 // Luckily, we can (at least for now) deduce the intermediate steps
4049 // just from the end-point.
4051 // There are basically four cases to consider:
4053 // 1. Reference to a *type*, such as a struct or enum:
4055 //        mod a { struct Foo<T> { ... } }
4057 //    Because we don't allow types to be declared within one
4058 //    another, a path that leads to a type will always look like
4059 //    `a::b::Foo<T>` where `a` and `b` are modules. This implies
4060 //    that only the final segment can have type parameters, and
4061 //    they are located in the TypeSpace.
4063 //    *Note:* Generally speaking, references to types don't
4064 //    actually pass through this function, but rather the
4065 //    `ast_ty_to_ty` function in `astconv`. However, in the case
4066 //    of struct patterns (and maybe literals) we do invoke
4067 //    `instantiate_value_path` to get the general type of an instance of
4068 //    a struct. (In these cases, there are actually no type
4069 //    parameters permitted at present, but perhaps we will allow
4070 //    them in the future.)
4072 // 1b. Reference to an enum variant or tuple-like struct:
4074 //        struct foo<T>(...)
4075 //        enum E<T> { foo(...) }
4077 //    In these cases, the parameters are declared in the type
4080 // 2. Reference to a *fn item*:
4084 //    In this case, the path will again always have the form
4085 //    `a::b::foo::<T>` where only the final segment should have
4086 //    type parameters. However, in this case, those parameters are
4087 //    declared on a value, and hence are in the `FnSpace`.
4089 // 3. Reference to a *method*:
4091 //        impl<A> SomeStruct<A> {
4095 //    Here we can have a path like
4096 //    `a::b::SomeStruct::<A>::foo::<B>`, in which case parameters
4097 //    may appear in two places. The penultimate segment,
4098 //    `SomeStruct::<A>`, contains parameters in TypeSpace, and the
4099 //    final segment, `foo::<B>` contains parameters in fn space.
4101 // 4. Reference to an *associated const*:
4103 // impl<A> AnotherStruct<A> {
4104 // const FOO: B = BAR;
4107 //    The path in this case will look like
4108 //    `a::b::AnotherStruct::<A>::FOO`, so the penultimate segment
4109 //    only will have parameters in TypeSpace.
4111 // The first step then is to categorize the segments appropriately.
4113 assert!(!segments.is_empty());
4115 let mut ufcs_associated = None;
4116 let mut type_segment = None;
4117 let mut fn_segment = None;
4119 // Case 1 and 1b. Reference to a *type* or *enum variant*.
4120 Def::Struct(def_id) |
4121 Def::Variant(_, def_id) |
4123 Def::TyAlias(def_id) |
4124 Def::AssociatedTy(_, def_id) |
4125 Def::Trait(def_id) => {
4126 // Everything but the final segment should have no
4127 // parameters at all.
4128 let mut generics = self.tcx.lookup_generics(def_id);
4129 if let Some(def_id) = generics.parent {
4130 // Variant and struct constructors use the
4131 // generics of their parent type definition.
4132 generics = self.tcx.lookup_generics(def_id);
4134 type_segment = Some((segments.last().unwrap(), generics));
4137 // Case 2. Reference to a top-level value.
4139 Def::Const(def_id) |
4140 Def::Static(def_id, _) => {
4141 fn_segment = Some((segments.last().unwrap(),
4142 self.tcx.lookup_generics(def_id)));
4145 // Case 3. Reference to a method or associated const.
4146 Def::Method(def_id) |
4147 Def::AssociatedConst(def_id) => {
4148 let container = self.tcx.impl_or_trait_item(def_id).container();
// Calling a trait method through a path is only legal for object-safe
// traits — enforced here for trait containers.
4150 ty::TraitContainer(trait_did) => {
4151 callee::check_legal_trait_for_method_call(self.ccx, span, trait_did)
4153 ty::ImplContainer(_) => {}
4156 let generics = self.tcx.lookup_generics(def_id);
4157 if segments.len() >= 2 {
// Fully written path: the penultimate segment holds the parent
// type's parameters, the last segment the method's own.
4158 let parent_generics = self.tcx.lookup_generics(generics.parent.unwrap());
4159 type_segment = Some((&segments[segments.len() - 2], parent_generics));
4161 // `<T>::assoc` will end up here, and so can `T::assoc`.
4162 let self_ty = opt_self_ty.expect("UFCS sugared assoc missing Self");
4163 ufcs_associated = Some((container, self_ty));
4165 fn_segment = Some((segments.last().unwrap(), generics));
4168 // Other cases. Various nonsense that really shouldn't show up
4169 // here. If they do, an error will have been reported
4170 // elsewhere. (I hope)
4172 Def::ForeignMod(..) |
4182 // In `<T as Trait<A, B>>::method`, `A` and `B` are mandatory, but
4183 // `opt_self_ty` can also be Some for `Foo::method`, where Foo's
4184 // type parameters are not mandatory.
4185 let require_type_space = opt_self_ty.is_some() && ufcs_associated.is_none();
4187 debug!("type_segment={:?} fn_segment={:?}", type_segment, fn_segment);
4189 // Now that we have categorized what space the parameters for each
4190 // segment belong to, let's sort out the parameters that the user
4191 // provided (if any) into their appropriate spaces. We'll also report
4192 // errors if type parameters are provided in an inappropriate place.
4193 let poly_segments = type_segment.is_some() as usize +
4194 fn_segment.is_some() as usize;
4195 self.tcx.prohibit_type_params(&segments[..segments.len() - poly_segments]);
// Locals and upvars have no generics: record the local's type with
// empty substs and return early.
4198 Def::Local(_, nid) | Def::Upvar(_, nid, _, _) => {
4199 let ty = self.local_ty(span, nid);
4200 let ty = self.normalize_associated_types_in(span, &ty);
4201 self.write_ty(node_id, ty);
4202 self.write_substs(node_id, ty::ItemSubsts {
4203 substs: Substs::empty(self.tcx)
4210 // Now we have to compare the types that the user *actually*
4211 // provided against the types that were *expected*. If the user
4212 // did not provide any types, then we want to substitute inference
4213 // variables. If the user provided some types, we may still need
4214 // to add defaults. If the user provided *too many* types, that's
4216 self.check_path_parameter_count(span, !require_type_space, &mut type_segment);
4217 self.check_path_parameter_count(span, true, &mut fn_segment);
// `fn_start` is the index in the flat generics where the fn-space
// parameters begin; anything below it belongs to the type segment.
4219 let (fn_start, has_self) = match (type_segment, fn_segment) {
4220 (_, Some((_, generics))) => {
4221 (generics.parent_count(), generics.has_self)
4223 (Some((_, generics)), None) => {
4224 (generics.own_count(), generics.has_self)
4226 (None, None) => (0, false)
// Build the substitutions: the region closure and the type closure are
// called per generic parameter, in definition order.
4228 let substs = Substs::for_item(self.tcx, def.def_id(), |def, _| {
4229 let mut i = def.index as usize;
4231 let segment = if i < fn_start {
4232 i -= has_self as usize;
4238 let lifetimes = match segment.map(|(s, _)| &s.parameters) {
4239 Some(&hir::AngleBracketedParameters(ref data)) => &data.lifetimes[..],
4240 Some(&hir::ParenthesizedParameters(_)) => bug!(),
// Use the written lifetime if provided, otherwise a fresh region var.
4244 if let Some(ast_lifetime) = lifetimes.get(i) {
4245 ast_region_to_region(self.tcx, ast_lifetime)
4247 self.region_var_for_def(span, def)
4250 let mut i = def.index as usize;
4252 let can_omit = i >= fn_start || !require_type_space;
4253 let segment = if i < fn_start {
4254 // Handle Self first, so we can adjust the index to match the AST.
4255 if has_self && i == 0 {
4256 return opt_self_ty.unwrap_or_else(|| {
4257 self.type_var_for_def(span, def, substs)
4260 i -= has_self as usize;
4266 let types = match segment.map(|(s, _)| &s.parameters) {
4267 Some(&hir::AngleBracketedParameters(ref data)) => &data.types[..],
4268 Some(&hir::ParenthesizedParameters(_)) => bug!(),
4272 // Skip over the lifetimes in the same segment.
4273 if let Some((_, generics)) = segment {
4274 i -= generics.regions.len();
4277 let omitted = can_omit && types.is_empty();
4278 if let Some(ast_ty) = types.get(i) {
4279 // A provided type parameter.
4281 } else if let (false, Some(default)) = (omitted, def.default) {
4282 // No type parameter provided, but a default exists.
4283 default.subst_spanned(self.tcx, substs, Some(span))
4285 // No type parameters were provided, we can infer all.
4286 // This can also be reached in some error cases:
4287 // We prefer to use inference variables instead of
4288 // TyError to let type inference recover somewhat.
4289 self.type_var_for_def(span, def, substs)
4293 // The things we are substituting into the type should not contain
4294 // escaping late-bound regions, and nor should the base type scheme.
4295 let scheme = self.tcx.lookup_item_type(def.def_id());
4296 let type_predicates = self.tcx.lookup_predicates(def.def_id());
4297 assert!(!substs.has_escaping_regions());
4298 assert!(!scheme.ty.has_escaping_regions());
4300 // Add all the obligations that are required, substituting and
4301 // normalized appropriately.
4302 let bounds = self.instantiate_bounds(span, &substs, &type_predicates);
4303 self.add_obligations_for_parameters(
4304 traits::ObligationCause::new(span, self.body_id, traits::ItemObligation(def.def_id())),
4307 // Substitute the values for the type parameters into the type of
4308 // the referenced item.
4309 let ty_substituted = self.instantiate_type_scheme(span, &substs, &scheme.ty);
4312 if let Some((ty::ImplContainer(impl_def_id), self_ty)) = ufcs_associated {
4313 // In the case of `Foo<T>::method` and `<Foo<T>>::method`, if `method`
4314 // is inherent, there is no `Self` parameter, instead, the impl needs
4315 // type parameters, which we can infer by unifying the provided `Self`
4316 // with the substituted impl type.
4317 let impl_scheme = self.tcx.lookup_item_type(impl_def_id);
4319 let impl_ty = self.instantiate_type_scheme(span, &substs, &impl_scheme.ty);
4320 match self.sub_types(false, TypeOrigin::Misc(span), self_ty, impl_ty) {
4321 Ok(InferOk { obligations, .. }) => {
4322 // FIXME(#32730) propagate obligations
4323 assert!(obligations.is_empty());
// Sub-typing succeeded during probing but failed now — compiler bug.
4327 "instantiate_value_path: (UFCS) {:?} was a subtype of {:?} but now is not?",
4334 debug!("instantiate_value_path: type of {:?} is {:?}",
// Record the final type and substitutions for this path node.
4337 self.write_ty(node_id, ty_substituted);
4338 self.write_substs(node_id, ty::ItemSubsts {
4344 /// Report errors if the provided parameters are too few or too many.
// Validate the generic arguments written on one path segment against its
// definition's generics: too many/few lifetimes (E0088/E0090), too
// many/few types (E0087/E0089), and associated-type bindings in an
// expression path (E0182). On "too many types" the segment is cleared
// (visible as a comment below; the assignment line itself is elided) so
// `instantiate_value_path` falls back to inference variables instead of
// cascading errors. `can_omit` (elided from the visible signature)
// allows writing zero types even when some are required.
4345 fn check_path_parameter_count(&self,
4348 segment: &mut Option<(&hir::PathSegment, &ty::Generics)>) {
4349 let (lifetimes, types, bindings) = match segment.map(|(s, _)| &s.parameters) {
4350 Some(&hir::AngleBracketedParameters(ref data)) => {
4351 (&data.lifetimes[..], &data.types[..], &data.bindings[..])
// Parenthesized parameters (`Fn(A) -> B` sugar) cannot occur on an
// expression path — that would be a compiler bug.
4353 Some(&hir::ParenthesizedParameters(_)) => {
4354 span_bug!(span, "parenthesized parameters cannot appear in ExprPath");
4356 None => (&[][..], &[][..], &[][..])
// `count` helper (head elided): pluralizes "N parameter(s)".
4360 format!("{} parameter{}", n, if n == 1 { "" } else { "s" })
4363 // Check provided lifetime parameters.
4364 let lifetime_defs = segment.map_or(&[][..], |(_, generics)| &generics.regions);
4365 if lifetimes.len() > lifetime_defs.len() {
// Point the error at the first surplus lifetime.
4366 let span = lifetimes[lifetime_defs.len()].span;
4367 span_err!(self.tcx.sess, span, E0088,
4368 "too many lifetime parameters provided: \
4369 expected {}, found {}",
4370 count(lifetime_defs.len()),
4371 count(lifetimes.len()));
// Writing SOME lifetimes but fewer than required is an error;
// writing none at all is fine (they are then inferred).
4372 } else if lifetimes.len() > 0 && lifetimes.len() < lifetime_defs.len() {
4373 span_err!(self.tcx.sess, span, E0090,
4374 "too few lifetime parameters provided: \
4375 expected {}, found {}",
4376 count(lifetime_defs.len()),
4377 count(lifetimes.len()));
4380 // Check provided type parameters.
4381 let type_defs = segment.map_or(&[][..], |(_, generics)| {
// Own generics only: skip the implicit Self parameter when this
// segment's generics have no parent.
4382 if generics.parent.is_none() {
4383 &generics.types[generics.has_self as usize..]
// Required = the prefix of parameters without defaults.
4388 let required_len = type_defs.iter()
4389 .take_while(|d| d.default.is_none())
4391 if types.len() > type_defs.len() {
4392 let span = types[type_defs.len()].span;
4393 struct_span_err!(self.tcx.sess, span, E0087,
4394 "too many type parameters provided: \
4395 expected at most {}, found {}",
4396 count(type_defs.len()),
4398 .span_label(span, &format!("too many type parameters")).emit();
4400 // To prevent derived errors to accumulate due to extra
4401 // type parameters, we force instantiate_value_path to
4402 // use inference variables instead of the provided types.
4404 } else if !(can_omit && types.len() == 0) && types.len() < required_len {
4405 let adjust = |len| if len > 1 { "parameters" } else { "parameter" };
4406 let required_param_str = adjust(required_len);
4407 let actual_param_str = adjust(types.len());
4408 struct_span_err!(self.tcx.sess, span, E0089,
4409 "too few type parameters provided: \
4410 expected {} {}, found {} {}",
4411 count(required_len),
4415 .span_label(span, &format!("expected {} type {}", required_len, required_param_str))
// Associated-type bindings (`Item = T`) belong in type paths only.
4419 if !bindings.is_empty() {
4420 span_err!(self.tcx.sess, bindings[0].span, E0182,
4421 "unexpected binding of associated item in expression path \
4422 (only allowed in type paths)");
// Attempts to structurally resolve `ty`; if inference cannot make it
// concrete, consults the fallback closure `f` for an alternative.
// NOTE(review): interior lines are missing from this extraction (the embedded
// line numbers skip values); comments describe only the visible code.
4426 fn structurally_resolve_type_or_else<F>(&self, sp: Span, ty: Ty<'tcx>, f: F)
4428 where F: Fn() -> Ty<'tcx>
// First let pending obligations/inference try to resolve the variable.
4430 let mut ty = self.resolve_type_vars_with_obligations(ty);
// Still not concrete: ask the caller-supplied fallback for a type.
4433 let alternative = f();
// If the fallback could not produce anything concrete either, report
// "type must be known in this context" — unless errors were already
// reported elsewhere — and poison the node with `err` so that derived
// errors are suppressed downstream.
4436 if alternative.is_ty_var() || alternative.references_error() {
4437 if !self.is_tainted_by_errors() {
4438 self.type_error_message(sp, |_actual| {
4439 "the type of this value must be known in this context".to_string()
4442 self.demand_suptype(sp, self.tcx.types.err, ty);
4443 ty = self.tcx.types.err;
// The fallback produced a usable type: record it via a subtyping demand.
4445 self.demand_suptype(sp, alternative, ty);
4453 // Resolves `typ` by a single level if `typ` is a type variable. If no
4454 // resolution is possible, then an error is reported.
4455 pub fn structurally_resolved_type(&self, sp: Span, ty: Ty<'tcx>) -> Ty<'tcx> {
// Delegates to the general helper above. The fallback closure body is
// truncated in this extraction; presumably it yields an error/unresolved
// type so the helper reports the failure — TODO confirm against full source.
4456 self.structurally_resolve_type_or_else(sp, ty, || {
// NOTE(review): interior lines are missing from this extraction; comments
// describe only the visible code.
4462 // Returns true if b contains a break that can exit from b
4463 pub fn may_break(tcx: TyCtxt, id: ast::NodeId, b: &hir::Block) -> bool {
4464 // First: is there an unlabeled break immediately
// (inside the loop body — remainder of this comment is truncated here)
4466 (loop_query(&b, |e| {
// An unlabeled `break` targets the innermost enclosing loop, i.e. `b`.
4468 hir::ExprBreak(None) => true,
4472 // Second: is there a labeled break with label
4473 // <id> nested anywhere inside the loop?
4474 (block_query(b, |e| {
// A labeled break exits `b` only when its label resolves to `id`.
4475 if let hir::ExprBreak(Some(_)) = e.node {
4476 tcx.expect_def(e.id) == Def::Label(id)
4483 pub fn check_bounds_are_used<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
4484 generics: &hir::Generics,
4486 debug!("check_bounds_are_used(n_tps={}, ty={:?})",
4487 generics.ty_params.len(), ty);
4489 // make a vector of booleans initially false, set to true when used
4490 if generics.ty_params.is_empty() { return; }
4491 let mut tps_used = vec![false; generics.ty_params.len()];
4493 for leaf_ty in ty.walk() {
4494 if let ty::TyParam(ParamTy {idx, ..}) = leaf_ty.sty {
4495 debug!("Found use of ty param num {}", idx);
4496 tps_used[idx as usize - generics.lifetimes.len()] = true;
4500 for (&used, param) in tps_used.iter().zip(&generics.ty_params) {
4502 struct_span_err!(ccx.tcx.sess, param.span, E0091,
4503 "type parameter `{}` is unused",
4505 .span_label(param.span, &format!("unused type parameter"))