1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
15 Within the check phase of type check, we check each item one at a time
16 (bodies of function expressions are checked as part of the containing
17 function). Inference is used to supply types wherever they are
20 By far the most complex case is checking the body of a function. This
21 can be broken down into several distinct phases:
23 - gather: creates type variables to represent the type of each local
24 variable and pattern binding.
26 - main: the main pass does the lion's share of the work: it
27 determines the types of all expressions, resolves
28 methods, checks for most invalid conditions, and so forth. In
29 some cases, where a type is unknown, it may create a type or region
30 variable and use that as the type of an expression.
32 In the process of checking, various constraints will be placed on
33 these type variables through the subtyping relationships requested
34 through the `demand` module. The `infer` module is in charge
35 of resolving those constraints.
37 - regionck: after main is complete, the regionck pass goes over all
38 types looking for regions and making sure that they did not escape
39 into places they are not in scope. This may also influence the
40 final assignments of the various region variables if there is some
43 - vtable: find and records the impls to use for each trait bound that
44 appears on a type parameter.
46 - writeback: writes the final types within a function body, replacing
47 type variables with their final inferred types. These final types
48 are written into the `tcx.node_types` table, which should *never* contain
49 any reference to a type variable.
53 While type checking a function, the intermediate types for the
54 expressions, blocks, and so forth contained within the function are
55 stored in `fcx.node_types` and `fcx.item_substs`. These types
56 may contain unresolved type variables. After type checking is
57 complete, the functions in the writeback module are used to take the
58 types from this table, resolve them, and then write them into their
59 permanent home in the type context `ccx.tcx`.
61 This means that during inferencing you should use `fcx.write_ty()`
62 and `fcx.expr_ty()` / `fcx.node_ty()` to write/obtain the types of
63 nodes within the function.
65 The types of top-level items, which never contain unbound type
66 variables, are stored directly into the `tcx` tables.
68 n.b.: A type variable is not the same thing as a type parameter. A
69 type variable is rather an "instance" of a type parameter: that is,
70 given a generic function `fn foo<T>(t: T)`: while checking the
71 function `foo`, the type `ty_param(0)` refers to the type `T`, which
72 is treated in abstract. When `foo()` is called, however, `T` will be
73 substituted for a fresh type variable `N`. This variable will
74 eventually be resolved to some concrete type (which might itself be
79 pub use self::Expectation::*;
80 pub use self::compare_method::{compare_impl_method, compare_const_impl};
81 use self::TupleArgumentsFlag::*;
83 use astconv::{AstConv, ast_region_to_region, PathParamMode};
84 use dep_graph::DepNode;
85 use fmt_macros::{Parser, Piece, Position};
86 use middle::cstore::LOCAL_CRATE;
87 use hir::def::{Def, PathResolution};
88 use hir::def_id::DefId;
90 use rustc::infer::{self, InferCtxt, InferOk, TypeOrigin, TypeTrace, type_variable};
91 use rustc::ty::subst::{Subst, Substs};
92 use rustc::traits::{self, Reveal};
93 use rustc::ty::{ParamTy, ParameterEnvironment};
94 use rustc::ty::{LvaluePreference, NoPreference, PreferMutLvalue};
95 use rustc::ty::{self, ToPolyTraitRef, Ty, TyCtxt, Visibility};
96 use rustc::ty::{MethodCall, MethodCallee};
97 use rustc::ty::adjustment;
98 use rustc::ty::fold::{BottomUpFolder, TypeFoldable};
99 use rustc::ty::util::{Representability, IntTypeExt};
100 use require_c_abi_if_variadic;
101 use rscope::{ElisionFailureInfo, RegionScope};
102 use session::{Session, CompileResult};
106 use util::common::{block_query, ErrorReported, indenter, loop_query};
107 use util::nodemap::{DefIdMap, FnvHashMap, NodeMap};
109 use std::cell::{Cell, Ref, RefCell};
110 use std::collections::{HashSet};
111 use std::mem::replace;
113 use syntax::abi::Abi;
116 use syntax::attr::AttrMetaMethods;
117 use syntax::codemap::{self, Spanned};
118 use syntax::feature_gate::{GateIssue, emit_feature_err};
119 use syntax::parse::token::{self, InternedString, keywords};
121 use syntax::util::lev_distance::find_best_match_for_name;
122 use syntax_pos::{self, Span};
123 use errors::DiagnosticBuilder;
125 use rustc::hir::intravisit::{self, Visitor};
126 use rustc::hir::{self, PatKind};
127 use rustc::hir::print as pprust;
128 use rustc_back::slice;
129 use rustc_const_eval::eval_length;
149 /// closures defined within the function. For example:
152 /// bar(move|| { ... })
155 /// Here, the function `foo()` and the closure passed to
156 /// `bar()` will each have their own `FnCtxt`, but they will
157 /// share the inherited fields.
158 pub struct Inherited<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
// Crate-wide type-checking context shared by all function contexts.
159 ccx: &'a CrateCtxt<'a, 'gcx>,
// Inference context used to create and unify type/region variables.
160 infcx: InferCtxt<'a, 'gcx, 'tcx>,
// Types of local variables and pattern bindings, keyed by node id;
// populated by `GatherLocalsVisitor::assign`.
161 locals: RefCell<NodeMap<Ty<'tcx>>>,
// Pending trait obligations registered during checking; drained by the
// various `select_*` passes.
163 fulfillment_cx: RefCell<traits::FulfillmentContext<'tcx>>,
165 // When we process a call like `c()` where `c` is a closure type,
166 // we may not have decided yet whether `c` is a `Fn`, `FnMut`, or
167 // `FnOnce` closure. In that case, we defer full resolution of the
168 // call until upvar inference can kick in and make the
169 // decision. We keep these deferred resolutions grouped by the
170 // def-id of the closure, so that once we decide, we can easily go
171 // back and process them.
172 deferred_call_resolutions: RefCell<DefIdMap<Vec<DeferredCallResolutionHandler<'gcx, 'tcx>>>>,
// Cast expressions whose checking is postponed until more type
// information is available.
174 deferred_cast_checks: RefCell<Vec<cast::CastCheck<'tcx>>>,
176 // Anonymized types found in explicit return types and their
177 // associated fresh inference variable. Writeback resolves these
178 // variables to get the concrete type, which can be used to
179 // deanonymize TyAnon, after typeck is done with all functions.
180 anon_types: RefCell<DefIdMap<Ty<'tcx>>>,
182 // Obligations which will have to be checked at the end of
183 // type-checking, after all functions have been inferred.
184 deferred_obligations: RefCell<Vec<traits::DeferredObligation<'tcx>>>,
// `Inherited` dereferences to its inference context, so `InferCtxt`
// methods can be called on it directly.
187 impl<'a, 'gcx, 'tcx> Deref for Inherited<'a, 'gcx, 'tcx> {
188 type Target = InferCtxt<'a, 'gcx, 'tcx>;
189 fn deref(&self) -> &Self::Target {
// A closure-call check that was postponed until the closure's kind
// (`Fn`/`FnMut`/`FnOnce`) is known; `resolve` performs the deferred check.
194 trait DeferredCallResolution<'gcx, 'tcx> {
195 fn resolve<'a>(&mut self, fcx: &FnCtxt<'a, 'gcx, 'tcx>);
198 type DeferredCallResolutionHandler<'gcx, 'tcx> = Box<DeferredCallResolution<'gcx, 'tcx>+'tcx>;
200 /// When type-checking an expression, we propagate downward
201 /// whatever type hint we are able in the form of an `Expectation`.
202 #[derive(Copy, Clone, Debug)]
203 pub enum Expectation<'tcx> {
204 /// We know nothing about what type this expression should have.
// NOTE(review): the `NoExpectation` variant itself is elided in this
// copy; it is matched by `to_option` below.
207 /// This expression should have the type given (or some subtype)
208 ExpectHasType(Ty<'tcx>),
210 /// This expression will be cast to the `Ty`
211 ExpectCastableToType(Ty<'tcx>),
213 /// This rvalue expression will be wrapped in `&` or `Box` and coerced
214 /// to `&Ty` or `Box<Ty>`, respectively. `Ty` is `[A]` or `Trait`.
215 ExpectRvalueLikeUnsized(Ty<'tcx>),
218 impl<'a, 'gcx, 'tcx> Expectation<'tcx> {
219 // Disregard "castable to" expectations because they
220 // can lead us astray. Consider for example `if cond
221 // {22} else {c} as u8` -- if we propagate the
222 // "castable to u8" constraint to 22, it will pick the
223 // type 22u8, which is overly constrained (c might not
224 // be a u8). In effect, the problem is that the
225 // "castable to" expectation is not the tightest thing
226 // we can say, so we want to drop it in this case.
227 // The tightest thing we can say is "must unify with
228 // else branch". Note that in the case of a "has type"
229 // constraint, this limitation does not hold.
231 // If the expected type is just a type variable, then don't use
232 // an expected type. Otherwise, we might write parts of the type
233 // when checking the 'then' block which are incompatible with the
// Weakens the expectation so it is safe to apply to both arms of a
// branching construct (`if`/`match`).
235 fn adjust_for_branches(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Expectation<'tcx> {
237 ExpectHasType(ety) => {
238 let ety = fcx.shallow_resolve(ety);
239 if !ety.is_ty_var() {
245 ExpectRvalueLikeUnsized(ety) => {
246 ExpectRvalueLikeUnsized(ety)
252 /// Provide an expectation for an rvalue expression given an *optional*
253 /// hint, which is not required for type safety (the resulting type might
254 /// be checked higher up, as is the case with `&expr` and `box expr`), but
255 /// is useful in determining the concrete type.
257 /// The primary use case is where the expected type is a fat pointer,
258 /// like `&[isize]`. For example, consider the following statement:
260 /// let x: &[isize] = &[1, 2, 3];
262 /// In this case, the expected type for the `&[1, 2, 3]` expression is
263 /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
264 /// expectation `ExpectHasType([isize])`, that would be too strong --
265 /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
266 /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
267 /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
268 /// which still is useful, because it informs integer literals and the like.
269 /// See the test case `test/run-pass/coerce-expect-unsized.rs` and #20169
270 /// for examples of where this comes up.
271 fn rvalue_hint(fcx: &FnCtxt<'a, 'gcx, 'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> {
// Only unsized tails (slices, str, trait objects) get the weaker
// "rvalue-like unsized" expectation; sized types keep the full hint.
272 match fcx.tcx.struct_tail(ty).sty {
273 ty::TySlice(_) | ty::TyStr | ty::TyTrait(..) => {
274 ExpectRvalueLikeUnsized(ty)
276 _ => ExpectHasType(ty)
280 // Resolves `expected` by a single level if it is a variable. If
281 // there is no expected type or resolution is not possible (e.g.,
282 // no constraints yet present), just returns `None`.
283 fn resolve(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Expectation<'tcx> {
288 ExpectCastableToType(t) => {
289 ExpectCastableToType(fcx.resolve_type_vars_if_possible(&t))
291 ExpectHasType(t) => {
292 ExpectHasType(fcx.resolve_type_vars_if_possible(&t))
294 ExpectRvalueLikeUnsized(t) => {
295 ExpectRvalueLikeUnsized(fcx.resolve_type_vars_if_possible(&t))
// Converts the expectation to an optional hint type; any expectation
// except `NoExpectation` yields its carried type.
300 fn to_option(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
301 match self.resolve(fcx) {
302 NoExpectation => None,
303 ExpectCastableToType(ty) |
305 ExpectRvalueLikeUnsized(ty) => Some(ty),
// Like `to_option`, but only a hard `ExpectHasType` produces a type;
// weaker expectations yield `None`.
309 fn only_has_type(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
310 match self.resolve(fcx) {
311 ExpectHasType(ty) => Some(ty),
// Tracks whether the code currently being checked is inside an `unsafe`
// context, and where that unsafety came from.
317 #[derive(Copy, Clone)]
318 pub struct UnsafetyState {
// Node id of the function or block that established `unsafety`.
319 pub def: ast::NodeId,
// Whether the current context is safe or unsafe.
320 pub unsafety: hir::Unsafety,
// Nesting depth of compiler-internal push/pop-unsafe blocks
// (see the `PushUnsafeBlock`/`PopUnsafeBlock` arms in `recurse`).
321 pub unsafe_push_count: u32,
// Creates the unsafety state for a fresh function body with the given
// declared unsafety; `from_fn: true` marks function-level unsafety.
326 pub fn function(unsafety: hir::Unsafety, def: ast::NodeId) -> UnsafetyState {
327 UnsafetyState { def: def, unsafety: unsafety, unsafe_push_count: 0, from_fn: true }
// Computes the unsafety state in effect inside the nested block `blk`,
// given the current state `self`.
330 pub fn recurse(&mut self, blk: &hir::Block) -> UnsafetyState {
331 match self.unsafety {
332 // If this unsafe, then if the outer function was already marked as
333 // unsafe we shouldn't attribute the unsafe'ness to the block. This
334 // way the block can be warned about instead of ignoring this
335 // extraneous block (functions are never warned about).
336 hir::Unsafety::Unsafe if self.from_fn => *self,
339 let (unsafety, def, count) = match blk.rules {
// checked_add/checked_sub: a push/pop imbalance indicates a compiler
// bug, so overflow/underflow deliberately panics via unwrap().
340 hir::PushUnsafeBlock(..) =>
341 (unsafety, blk.id, self.unsafe_push_count.checked_add(1).unwrap()),
342 hir::PopUnsafeBlock(..) =>
343 (unsafety, blk.id, self.unsafe_push_count.checked_sub(1).unwrap()),
344 hir::UnsafeBlock(..) =>
345 (hir::Unsafety::Unsafe, blk.id, self.unsafe_push_count),
346 hir::DefaultBlock | hir::PushUnstableBlock | hir:: PopUnstableBlock =>
347 (unsafety, self.def, self.unsafe_push_count),
349 UnsafetyState{ def: def,
351 unsafe_push_count: count,
// Per-function type-checking context; wraps the shared `Inherited`
// fields (see `inh` below) with function-local state.
359 pub struct FnCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
// Cache of HIR-type-to-`Ty` conversions performed in this function.
360 ast_ty_to_ty_cache: RefCell<NodeMap<Ty<'tcx>>>,
// Node id of the body (block) currently being checked.
362 body_id: ast::NodeId,
364 // This flag is set to true if, during the writeback phase, we encounter
365 // a type error in this function.
366 writeback_errors: Cell<bool>,
368 // Number of errors that had been reported when we started
369 // checking this function. On exit, if we find that *more* errors
370 // have been reported, we will skip regionck and other work that
371 // expects the types within the function to be consistent.
372 err_count_on_creation: usize,
// Current unsafety state (updated as blocks are entered; see
// `UnsafetyState::recurse`).
376 ps: RefCell<UnsafetyState>,
// Fields shared with enclosing/nested function contexts.
378 inh: &'a Inherited<'a, 'gcx, 'tcx>,
// `FnCtxt` dereferences to the shared `Inherited` data (and, through
// it, to the `InferCtxt`).
381 impl<'a, 'gcx, 'tcx> Deref for FnCtxt<'a, 'gcx, 'tcx> {
382 type Target = Inherited<'a, 'gcx, 'tcx>;
383 fn deref(&self) -> &Self::Target {
388 /// Helper type of a temporary returned by ccx.inherited(...).
389 /// Necessary because we can't write the following bound:
390 /// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(Inherited<'b, 'gcx, 'tcx>).
391 pub struct InheritedBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
392 ccx: &'a CrateCtxt<'a, 'gcx>,
// Deferred inference-context construction; actually entered in
// `InheritedBuilder::enter` below.
393 infcx: infer::InferCtxtBuilder<'a, 'gcx, 'tcx>
396 impl<'a, 'gcx, 'tcx> CrateCtxt<'a, 'gcx> {
// Builds an `InheritedBuilder` for checking the item with node id `id`,
// setting up its parameter environment and a fresh inference context.
397 pub fn inherited(&'a self, id: ast::NodeId)
398 -> InheritedBuilder<'a, 'gcx, 'tcx> {
399 let param_env = ParameterEnvironment::for_item(self.tcx, id);
402 infcx: self.tcx.infer_ctxt(Some(ty::Tables::empty()),
404 Reveal::NotSpecializable)
409 impl<'a, 'gcx, 'tcx> InheritedBuilder<'a, 'gcx, 'tcx> {
// Enters the inference context and runs `f` with a fully-constructed
// `Inherited` (all deferred tables start empty).
410 fn enter<F, R>(&'tcx mut self, f: F) -> R
411 where F: for<'b> FnOnce(Inherited<'b, 'gcx, 'tcx>) -> R
414 self.infcx.enter(|infcx| {
418 fulfillment_cx: RefCell::new(traits::FulfillmentContext::new()),
419 locals: RefCell::new(NodeMap()),
420 deferred_call_resolutions: RefCell::new(DefIdMap()),
421 deferred_cast_checks: RefCell::new(Vec::new()),
422 anon_types: RefCell::new(DefIdMap()),
423 deferred_obligations: RefCell::new(Vec::new()),
429 impl<'a, 'gcx, 'tcx> Inherited<'a, 'gcx, 'tcx> {
// Normalizes associated types appearing in `T`, registering any
// obligations produced into this context's fulfillment context.
430 fn normalize_associated_types_in<T>(&self,
432 body_id: ast::NodeId,
435 where T : TypeFoldable<'tcx>
437 assoc::normalize_associated_types_in(self,
438 &mut self.fulfillment_cx.borrow_mut(),
446 struct CheckItemTypesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
447 struct CheckItemBodiesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
449 impl<'a, 'tcx> Visitor<'tcx> for CheckItemTypesVisitor<'a, 'tcx> {
450 fn visit_item(&mut self, i: &'tcx hir::Item) {
451 check_item_type(self.ccx, i);
452 intravisit::walk_item(self, i);
// Fixed-length array types (`[T; N]`) embed an expression: check that
// the length expression `N` is a constant of type `usize`.
455 fn visit_ty(&mut self, t: &'tcx hir::Ty) {
457 hir::TyFixedLengthVec(_, ref expr) => {
458 check_const_with_type(self.ccx, &expr, self.ccx.tcx.types.usize, expr.id);
463 intravisit::walk_ty(self, t);
467 impl<'a, 'tcx> Visitor<'tcx> for CheckItemBodiesVisitor<'a, 'tcx> {
// Type-checks the body of each item; bodies of nested items are
// reached via `check_item_body` itself.
468 fn visit_item(&mut self, i: &'tcx hir::Item) {
469 check_item_body(self.ccx, i);
// Entry point for the well-formedness checking pass over all items in
// the crate; errors are tracked via the session.
473 pub fn check_wf_new(ccx: &CrateCtxt) -> CompileResult {
474 ccx.tcx.sess.track_errors(|| {
475 let mut visit = wfcheck::CheckTypeWellFormedVisitor::new(ccx);
476 ccx.tcx.visit_all_items_in_krate(DepNode::WfCheck, &mut visit);
// Entry point for the type-signature checking pass (runs
// `CheckItemTypesVisitor` over every item in the crate).
480 pub fn check_item_types(ccx: &CrateCtxt) -> CompileResult {
481 ccx.tcx.sess.track_errors(|| {
482 let mut visit = CheckItemTypesVisitor { ccx: ccx };
483 ccx.tcx.visit_all_items_in_krate(DepNode::TypeckItemType, &mut visit);
// Entry point for the item-body checking pass, followed by processing of
// obligations that were deferred until all bodies were inferred.
487 pub fn check_item_bodies(ccx: &CrateCtxt) -> CompileResult {
488 ccx.tcx.sess.track_errors(|| {
489 let mut visit = CheckItemBodiesVisitor { ccx: ccx };
490 ccx.tcx.visit_all_items_in_krate(DepNode::TypeckItemBody, &mut visit);
492 // Process deferred obligations, now that all functions
493 // bodies have been fully inferred.
494 for (&item_id, obligations) in ccx.deferred_obligations.borrow().iter() {
495 // Use the same DepNode as for the body of the original function/item.
496 let def_id = ccx.tcx.map.local_def_id(item_id);
497 let _task = ccx.tcx.dep_graph.in_task(DepNode::TypeckItemBody(def_id));
// Each deferred obligation is re-checked in a fresh inference context
// under the originating item's parameter environment.
499 let param_env = ParameterEnvironment::for_item(ccx.tcx, item_id);
500 ccx.tcx.infer_ctxt(None, Some(param_env),
501 Reveal::NotSpecializable).enter(|infcx| {
502 let mut fulfillment_cx = traits::FulfillmentContext::new();
503 for obligation in obligations.iter().map(|o| o.to_obligation()) {
504 fulfillment_cx.register_predicate_obligation(&infcx, obligation);
507 if let Err(errors) = fulfillment_cx.select_all_or_error(&infcx) {
508 infcx.report_fulfillment_errors(&errors);
// RFC 1592 obligations are reported as warnings, not hard errors.
511 if let Err(errors) = fulfillment_cx.select_rfc1592_obligations(&infcx) {
512 infcx.report_fulfillment_errors_as_warnings(&errors, item_id);
// Entry point for drop-check: validates every local `Drop` impl in the
// crate (no-op if the crate has no `Drop` lang item).
519 pub fn check_drop_impls(ccx: &CrateCtxt) -> CompileResult {
520 ccx.tcx.sess.track_errors(|| {
521 let _task = ccx.tcx.dep_graph.in_task(DepNode::Dropck);
522 let drop_trait = match ccx.tcx.lang_items.drop_trait() {
523 Some(id) => ccx.tcx.lookup_trait_def(id), None => { return }
525 drop_trait.for_each_impl(ccx.tcx, |drop_impl_did| {
526 let _task = ccx.tcx.dep_graph.in_task(DepNode::DropckImpl(drop_impl_did));
// Only impls local to this crate are checked here.
527 if drop_impl_did.is_local() {
528 match dropck::check_drop_impl(ccx, drop_impl_did) {
// On failure, dropck must already have reported an error.
531 assert!(ccx.tcx.sess.has_errors());
// Type-checks a free function (or method) body: sets up the inherited
// context, liberates the signature's late-bound regions, runs the main
// check, then the closure/regionck/writeback passes.
539 fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
540 decl: &'tcx hir::FnDecl,
541 body: &'tcx hir::Block,
542 fn_id: ast::NodeId) {
543 let raw_fty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(fn_id)).ty;
544 let fn_ty = match raw_fty.sty {
545 ty::TyFnDef(_, _, f) => f,
546 _ => span_bug!(body.span, "check_bare_fn: function type expected")
549 ccx.inherited(fn_id).enter(|inh| {
550 // Compute the fty from point of view of inside fn.
551 let fn_scope = inh.tcx.region_maps.call_site_extent(fn_id, body.id);
553 fn_ty.sig.subst(inh.tcx, &inh.parameter_environment.free_substs);
555 inh.tcx.liberate_late_bound_regions(fn_scope, &fn_sig);
557 inh.normalize_associated_types_in(body.span, body.id, &fn_sig);
// Main pass: checks the body and returns the function context.
559 let fcx = check_fn(&inh, fn_ty.unsafety, fn_id, &fn_sig, decl, fn_id, body);
561 fcx.select_all_obligations_and_apply_defaults();
562 fcx.closure_analyze_fn(body);
563 fcx.select_obligations_where_possible();
565 fcx.select_all_obligations_or_error(); // Casts can introduce new obligations.
// Region checking and writeback run last, once types are resolved.
567 fcx.regionck_fn(fn_id, decl, body);
568 fcx.resolve_type_vars_in_fn(decl, body, fn_id);
// The "gather" pass: walks a function body assigning a type (declared or
// a fresh inference variable) to every local and pattern binding.
572 struct GatherLocalsVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
573 fcx: &'a FnCtxt<'a, 'gcx, 'tcx>
576 impl<'a, 'gcx, 'tcx> GatherLocalsVisitor<'a, 'gcx, 'tcx> {
// Records a type for node `nid` in `fcx.locals`: the user-specified
// type when `ty_opt` is `Some`, otherwise a fresh inference variable.
577 fn assign(&mut self, _span: Span, nid: ast::NodeId, ty_opt: Option<Ty<'tcx>>) -> Ty<'tcx> {
580 // infer the variable's type
581 let var_ty = self.fcx.next_ty_var();
582 self.fcx.locals.borrow_mut().insert(nid, var_ty);
586 // take type that the user specified
587 self.fcx.locals.borrow_mut().insert(nid, typ);
594 impl<'a, 'gcx, 'tcx> Visitor<'gcx> for GatherLocalsVisitor<'a, 'gcx, 'tcx> {
595 // Add explicitly-declared locals.
596 fn visit_local(&mut self, local: &'gcx hir::Local) {
597 let o_ty = match local.ty {
598 Some(ref ty) => Some(self.fcx.to_ty(&ty)),
601 self.assign(local.span, local.id, o_ty);
602 debug!("Local variable {:?} is assigned type {}",
604 self.fcx.ty_to_string(
605 self.fcx.locals.borrow().get(&local.id).unwrap().clone()));
606 intravisit::walk_local(self, local);
609 // Add pattern bindings.
610 fn visit_pat(&mut self, p: &'gcx hir::Pat) {
611 if let PatKind::Binding(_, ref path1, _) = p.node {
612 let var_ty = self.assign(p.span, p.id, None);
// Every pattern binding must have a sized type.
614 self.fcx.require_type_is_sized(var_ty, p.span,
615 traits::VariableType(p.id));
617 debug!("Pattern binding {} is assigned to {} with type {:?}",
619 self.fcx.ty_to_string(
620 self.fcx.locals.borrow().get(&p.id).unwrap().clone()),
623 intravisit::walk_pat(self, p);
626 fn visit_block(&mut self, b: &'gcx hir::Block) {
627 // non-obvious: the `blk` variable maps to region lb, so
628 // we have to keep this up-to-date. This
629 // is... unfortunate. It'd be nice to not need this.
630 intravisit::walk_block(self, b);
633 // Since an expr occurs as part of the type fixed size arrays we
634 // need to record the type for that node
635 fn visit_ty(&mut self, t: &'gcx hir::Ty) {
637 hir::TyFixedLengthVec(ref ty, ref count_expr) => {
// The array length expression must check against `usize`.
639 self.fcx.check_expr_with_hint(&count_expr, self.fcx.tcx.types.usize);
641 hir::TyBareFn(ref function_declaration) => {
642 intravisit::walk_fn_decl_nopat(self, &function_declaration.decl);
643 walk_list!(self, visit_lifetime_def, &function_declaration.lifetimes);
645 _ => intravisit::walk_ty(self, t)
649 // Don't descend into the bodies of nested closures
650 fn visit_fn(&mut self, _: intravisit::FnKind<'gcx>, _: &'gcx hir::FnDecl,
651 _: &'gcx hir::Block, _: Span, _: ast::NodeId) { }
654 /// Helper used by check_bare_fn and check_expr_fn. Does the grungy work of checking a function
655 /// body and returns the function context used for that purpose, since in the case of a fn item
656 /// there is still a bit more to do.
659 /// * inherited: other fields inherited from the enclosing fn (if any)
660 fn check_fn<'a, 'gcx, 'tcx>(inherited: &'a Inherited<'a, 'gcx, 'tcx>,
661 unsafety: hir::Unsafety,
662 unsafety_id: ast::NodeId,
663 fn_sig: &ty::FnSig<'tcx>,
664 decl: &'gcx hir::FnDecl,
666 body: &'gcx hir::Block)
667 -> FnCtxt<'a, 'gcx, 'tcx>
669 let mut fn_sig = fn_sig.clone();
671 debug!("check_fn(sig={:?}, fn_id={})", fn_sig, fn_id);
673 // Create the function context. This is either derived from scratch or,
674 // in the case of function expressions, based on the outer context.
675 let mut fcx = FnCtxt::new(inherited, fn_sig.output, body.id);
676 *fcx.ps.borrow_mut() = UnsafetyState::function(unsafety, unsafety_id);
// The return type must be sized, and any `impl Trait` (anon) types in
// it are replaced with fresh inference variables.
678 fcx.require_type_is_sized(fcx.ret_ty, decl.output.span(), traits::ReturnType);
679 fcx.ret_ty = fcx.instantiate_anon_types(&fcx.ret_ty);
680 fn_sig.output = fcx.ret_ty;
683 let mut visit = GatherLocalsVisitor { fcx: &fcx, };
685 // Add formal parameters.
686 for (arg_ty, input) in fn_sig.inputs.iter().zip(&decl.inputs) {
687 // The type of the argument must be well-formed.
689 // NB -- this is now checked in wfcheck, but that
690 // currently only results in warnings, so we issue an
691 // old-style WF obligation here so that we still get the
692 // errors that we used to get.
693 fcx.register_old_wf_obligation(arg_ty, input.ty.span, traits::MiscObligation);
695 // Create type variables for each argument.
696 pat_util::pat_bindings(&input.pat, |_bm, pat_id, sp, _path| {
697 let var_ty = visit.assign(sp, pat_id, None);
698 fcx.require_type_is_sized(var_ty, sp, traits::VariableType(pat_id));
701 // Check the pattern.
702 fcx.check_pat(&input.pat, arg_ty);
703 fcx.write_ty(input.id, arg_ty);
// Gather the remaining locals, then record the liberated signature.
706 visit.visit_block(body);
709 inherited.tables.borrow_mut().liberated_fn_sigs.insert(fn_id, fn_sig);
711 // FIXME(aburka) do we need this special case? and should it be is_uninhabited?
712 let expected = if fcx.ret_ty.is_never() {
715 ExpectHasType(fcx.ret_ty)
717 fcx.check_block_with_expected(body, expected);
// Checks a struct definition: representability, and SIMD constraints
// when the `#[repr(simd)]`-style flag is set.
722 pub fn check_struct(ccx: &CrateCtxt, id: ast::NodeId, span: Span) {
725 check_representable(tcx, span, id, "struct");
727 if tcx.lookup_simd(ccx.tcx.map.local_def_id(id)) {
728 check_simd(tcx, span, id);
// Checks the type-level properties of a single item (signatures,
// variants, impl/trait conformance); bodies are handled separately by
// `check_item_body`.
732 pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
733 debug!("check_item_type(it.id={}, it.name={})",
735 ccx.tcx.item_path_str(ccx.tcx.map.local_def_id(it.id)));
736 let _indenter = indenter();
738 // Consts can play a role in type-checking, so they are included here.
739 hir::ItemStatic(_, _, ref e) |
740 hir::ItemConst(_, ref e) => check_const(ccx, &e, it.id),
741 hir::ItemEnum(ref enum_definition, _) => {
742 check_enum_variants(ccx,
744 &enum_definition.variants,
747 hir::ItemFn(..) => {} // entirely within check_item_body
748 hir::ItemImpl(_, _, _, _, _, ref impl_items) => {
749 debug!("ItemImpl {} with id {}", it.name, it.id);
750 let impl_def_id = ccx.tcx.map.local_def_id(it.id);
// Trait impls: check items against the trait definition and validate
// any `#[rustc_on_unimplemented]` attribute on the trait.
751 match ccx.tcx.impl_trait_ref(impl_def_id) {
752 Some(impl_trait_ref) => {
753 check_impl_items_against_trait(ccx,
758 let trait_def_id = impl_trait_ref.def_id;
759 check_on_unimplemented(ccx, trait_def_id, it);
764 hir::ItemTrait(..) => {
765 let def_id = ccx.tcx.map.local_def_id(it.id);
766 check_on_unimplemented(ccx, def_id, it);
768 hir::ItemStruct(..) => {
769 check_struct(ccx, it.id, it.span);
771 hir::ItemTy(_, ref generics) => {
772 let pty_ty = ccx.tcx.node_id_to_type(it.id);
773 check_bounds_are_used(ccx, &generics.ty_params, pty_ty);
775 hir::ItemForeignMod(ref m) => {
// Intrinsics get dedicated signature checks; other foreign items must
// be non-generic, and variadic fns must use the C ABI.
776 if m.abi == Abi::RustIntrinsic {
777 for item in &m.items {
778 intrinsic::check_intrinsic_type(ccx, item);
780 } else if m.abi == Abi::PlatformIntrinsic {
781 for item in &m.items {
782 intrinsic::check_platform_intrinsic_type(ccx, item);
785 for item in &m.items {
786 let pty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(item.id));
787 if !pty.generics.types.is_empty() {
788 let mut err = struct_span_err!(ccx.tcx.sess, item.span, E0044,
789 "foreign items may not have type parameters");
790 span_help!(&mut err, item.span,
791 "consider using specialization instead of \
796 if let hir::ForeignItemFn(ref fn_decl, _) = item.node {
797 require_c_abi_if_variadic(ccx.tcx, fn_decl, m.abi, item.span);
802 _ => {/* nothing to do */ }
// Type-checks the executable bodies of an item: function bodies, const
// initializers, and the bodies of impl/trait items.
806 pub fn check_item_body<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
807 debug!("check_item_body(it.id={}, it.name={})",
809 ccx.tcx.item_path_str(ccx.tcx.map.local_def_id(it.id)));
810 let _indenter = indenter();
812 hir::ItemFn(ref decl, _, _, _, _, ref body) => {
813 check_bare_fn(ccx, &decl, &body, it.id);
815 hir::ItemImpl(_, _, _, _, _, ref impl_items) => {
816 debug!("ItemImpl {} with id {}", it.name, it.id);
818 for impl_item in impl_items {
819 match impl_item.node {
820 hir::ImplItemKind::Const(_, ref expr) => {
821 check_const(ccx, &expr, impl_item.id)
823 hir::ImplItemKind::Method(ref sig, ref body) => {
824 check_bare_fn(ccx, &sig.decl, body, impl_item.id);
826 hir::ImplItemKind::Type(_) => {
827 // Nothing to do here.
832 hir::ItemTrait(_, _, _, ref trait_items) => {
833 for trait_item in trait_items {
834 match trait_item.node {
835 hir::ConstTraitItem(_, Some(ref expr)) => {
836 check_const(ccx, &expr, trait_item.id)
// Trait methods (with or without a default body) may not be `const`.
838 hir::MethodTraitItem(ref sig, Some(ref body)) => {
839 check_trait_fn_not_const(ccx, trait_item.span, sig.constness);
841 check_bare_fn(ccx, &sig.decl, body, trait_item.id);
843 hir::MethodTraitItem(ref sig, None) => {
844 check_trait_fn_not_const(ccx, trait_item.span, sig.constness);
846 hir::ConstTraitItem(_, None) |
847 hir::TypeTraitItem(..) => {
853 _ => {/* nothing to do */ }
// Emits E0379 if a trait function is declared `const` (not permitted).
857 fn check_trait_fn_not_const<'a,'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
859 constness: hir::Constness)
862 hir::Constness::NotConst => {
865 hir::Constness::Const => {
866 struct_span_err!(ccx.tcx.sess, span, E0379, "trait fns cannot be declared const")
867 .span_label(span, &format!("trait fns cannot be const"))
// Validates a `#[rustc_on_unimplemented = "..."]` attribute on `item`:
// the format string may only reference `{Self}` or the item's named type
// parameters, and the attribute must carry a string value.
873 fn check_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
876 let generics = ccx.tcx.lookup_generics(def_id);
877 if let Some(ref attr) = item.attrs.iter().find(|a| {
878 a.check_name("rustc_on_unimplemented")
880 if let Some(ref istring) = attr.value_str() {
881 let parser = Parser::new(&istring);
882 let types = &generics.types;
883 for token in parser {
885 Piece::String(_) => (), // Normal string, no need to check it
886 Piece::NextArgument(a) => match a.position {
887 // `{Self}` is allowed
888 Position::ArgumentNamed(s) if s == "Self" => (),
889 // So is `{A}` if A is a type parameter
890 Position::ArgumentNamed(s) => match types.iter().find(|t| {
// E0230: the named substitution does not match any type parameter.
895 let name = ccx.tcx.item_name(def_id);
896 span_err!(ccx.tcx.sess, attr.span, E0230,
897 "there is no type parameter \
902 // `{:1}` and `{}` are not to be used
903 Position::ArgumentIs(_) => {
904 span_err!(ccx.tcx.sess, attr.span, E0231,
905 "only named substitution \
906 parameters are allowed");
// E0232: the attribute was given without a string value.
913 ccx.tcx.sess, attr.span, E0232,
914 "this attribute must have a value")
915 .span_label(attr.span, &format!("attribute requires a value"))
916 .note(&format!("eg `#[rustc_on_unimplemented = \"foo\"]`"))
// Emits E0520 when an impl item tries to specialize a parent impl item
// that is not marked `default`; points at the parent impl when local.
922 fn report_forbidden_specialization<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
923 impl_item: &hir::ImplItem,
926 let mut err = struct_span_err!(
927 tcx.sess, impl_item.span, E0520,
928 "item `{}` is provided by an `impl` that specializes \
929 another, but the item in the parent `impl` is not \
930 marked `default` and so it cannot be specialized.",
// If the parent impl has no local span, it lives in another crate:
// name the crate instead of pointing at a span.
933 match tcx.span_of_impl(parent_impl) {
935 err.span_note(span, "parent implementation is here:");
938 err.note(&format!("parent implementation is in crate `{}`", cname));
// Checks that `impl_item` is allowed to override its counterpart in the
// specialization ancestor chain: the nearest ancestor definition (skip(1)
// skips this impl itself) must not be final, i.e. must be `default`.
945 fn check_specialization_validity<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
946 trait_def: &ty::TraitDef<'tcx>,
948 impl_item: &hir::ImplItem)
950 let ancestors = trait_def.ancestors(impl_id);
// Look up the parent definition of the same name and kind, mapped to
// its defaultness.
952 let parent = match impl_item.node {
953 hir::ImplItemKind::Const(..) => {
954 ancestors.const_defs(tcx, impl_item.name).skip(1).next()
955 .map(|node_item| node_item.map(|parent| parent.defaultness))
957 hir::ImplItemKind::Method(..) => {
958 ancestors.fn_defs(tcx, impl_item.name).skip(1).next()
959 .map(|node_item| node_item.map(|parent| parent.defaultness))
962 hir::ImplItemKind::Type(_) => {
963 ancestors.type_defs(tcx, impl_item.name).skip(1).next()
964 .map(|node_item| node_item.map(|parent| parent.defaultness))
968 if let Some(parent) = parent {
969 if parent.item.is_final() {
970 report_forbidden_specialization(tcx, impl_item, parent.node.def_id());
976 fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
979 impl_trait_ref: &ty::TraitRef<'tcx>,
980 impl_items: &[hir::ImplItem]) {
// NOTE(review): this span is the tail of a function whose header lies
// before this chunk (it checks the items of a trait impl against the
// trait's declared items). The listing is gapped: several closing
// braces, match arms and argument lists are on lines not shown here.
981 // If the trait reference itself is erroneous (so the compilation is going
982 // to fail), skip checking the items here -- the `impl_item` table in `tcx`
983 // isn't populated for such impls.
984 if impl_trait_ref.references_error() { return; }
986 // Locate trait definition and items
988 let trait_def = tcx.lookup_trait_def(impl_trait_ref.def_id);
989 let trait_items = tcx.trait_items(impl_trait_ref.def_id);
// Records the impl item that overrides a defaulted associated type,
// used later to report E0399 for items invalidated by that override.
990 let mut overridden_associated_type = None;
992 // Check existing impl methods to see if they are both present in trait
993 // and compatible with trait signature
994 for impl_item in impl_items {
995 let ty_impl_item = tcx.impl_or_trait_item(tcx.map.local_def_id(impl_item.id));
// Match the impl item to a trait item by name.
996 let ty_trait_item = trait_items.iter()
997 .find(|ac| ac.name() == ty_impl_item.name());
999 // Check that impl definition matches trait definition
1000 if let Some(ty_trait_item) = ty_trait_item {
1001 match impl_item.node {
// Associated const in the impl: the trait item must also be a
// const (E0323 otherwise).
1002 hir::ImplItemKind::Const(..) => {
1003 let impl_const = match ty_impl_item {
1004 ty::ConstTraitItem(ref cti) => cti,
1005 _ => span_bug!(impl_item.span, "non-const impl-item for const")
1008 // Find associated const definition.
1009 if let &ty::ConstTraitItem(ref trait_const) = ty_trait_item {
1010 compare_const_impl(ccx,
1016 let mut err = struct_span_err!(tcx.sess, impl_item.span, E0323,
1017 "item `{}` is an associated const, \
1018 which doesn't match its trait `{:?}`",
1021 err.span_label(impl_item.span, &format!("does not match trait"));
1022 // We can only get the spans from local trait definition
1023 // Same for E0324 and E0325
1024 if let Some(trait_span) = tcx.map.span_if_local(ty_trait_item.def_id()) {
1025 err.span_label(trait_span, &format!("item in trait"));
// Method in the impl: must not be `const fn`, and the trait item
// must be a method (E0324 otherwise).
1030 hir::ImplItemKind::Method(ref sig, ref body) => {
1031 check_trait_fn_not_const(ccx, impl_item.span, sig.constness);
1033 let impl_method = match ty_impl_item {
1034 ty::MethodTraitItem(ref mti) => mti,
1035 _ => span_bug!(impl_item.span, "non-method impl-item for method")
1038 if let &ty::MethodTraitItem(ref trait_method) = ty_trait_item {
1039 compare_impl_method(ccx,
1046 let mut err = struct_span_err!(tcx.sess, impl_item.span, E0324,
1047 "item `{}` is an associated method, \
1048 which doesn't match its trait `{:?}`",
1051 err.span_label(impl_item.span, &format!("does not match trait"));
1052 if let Some(trait_span) = tcx.map.span_if_local(ty_trait_item.def_id()) {
1053 err.span_label(trait_span, &format!("item in trait"));
// Associated type in the impl: the trait item must be a type
// (E0325 otherwise). If the trait already supplied a default,
// remember that this impl overrode it.
1058 hir::ImplItemKind::Type(_) => {
1059 let impl_type = match ty_impl_item {
1060 ty::TypeTraitItem(ref tti) => tti,
1061 _ => span_bug!(impl_item.span, "non-type impl-item for type")
1064 if let &ty::TypeTraitItem(ref at) = ty_trait_item {
1065 if let Some(_) = at.ty {
1066 overridden_associated_type = Some(impl_item);
1069 let mut err = struct_span_err!(tcx.sess, impl_item.span, E0325,
1070 "item `{}` is an associated type, \
1071 which doesn't match its trait `{:?}`",
1074 err.span_label(impl_item.span, &format!("does not match trait"));
1075 if let Some(trait_span) = tcx.map.span_if_local(ty_trait_item.def_id()) {
1076 err.span_label(trait_span, &format!("item in trait"));
// Specialization rules are checked for every impl item regardless of
// whether it matched a trait item.
1084 check_specialization_validity(tcx, trait_def, impl_id, impl_item);
1087 // Check for missing items from trait
1088 let provided_methods = tcx.provided_trait_methods(impl_trait_ref.def_id);
1089 let mut missing_items = Vec::new();
1090 let mut invalidated_items = Vec::new();
1091 let associated_type_overridden = overridden_associated_type.is_some();
// For each trait item, decide whether it has a default (is_provided)
// and whether this impl (or an ancestor in the specialization chain)
// implements it (is_implemented).
1092 for trait_item in trait_items.iter() {
1097 ty::ConstTraitItem(ref associated_const) => {
1098 is_provided = associated_const.has_value;
1099 is_implemented = impl_items.iter().any(|ii| {
1101 hir::ImplItemKind::Const(..) => {
1102 ii.name == associated_const.name
1108 ty::MethodTraitItem(ref trait_method) => {
1109 is_provided = provided_methods.iter().any(|m| m.name == trait_method.name);
// `is_from_trait()` false means the definition comes from an impl,
// i.e. the method really is implemented rather than defaulted.
1110 is_implemented = trait_def.ancestors(impl_id)
1111 .fn_defs(tcx, trait_method.name)
1113 .map(|node_item| !node_item.node.is_from_trait())
1116 ty::TypeTraitItem(ref trait_assoc_ty) => {
1117 is_provided = trait_assoc_ty.ty.is_some();
1118 is_implemented = trait_def.ancestors(impl_id)
1119 .type_defs(tcx, trait_assoc_ty.name)
1121 .map(|node_item| !node_item.node.is_from_trait())
1126 if !is_implemented {
1128 missing_items.push(trait_item.name());
// A defaulted item whose default may rely on an overridden associated
// type must itself be re-implemented; collect it for E0399.
1129 } else if associated_type_overridden {
1130 invalidated_items.push(trait_item.name());
// E0046: required trait items left unimplemented.
1135 if !missing_items.is_empty() {
1136 struct_span_err!(tcx.sess, impl_span, E0046,
1137 "not all trait items implemented, missing: `{}`",
1138 missing_items.iter()
1139 .map(|name| name.to_string())
1140 .collect::<Vec<_>>().join("`, `"))
1141 .span_label(impl_span, &format!("missing `{}` in implementation",
1142 missing_items.iter()
1143 .map(|name| name.to_string())
1144 .collect::<Vec<_>>().join("`, `"))
// E0399: defaulted items invalidated by an associated-type override.
1148 if !invalidated_items.is_empty() {
1149 let invalidator = overridden_associated_type.unwrap();
1150 span_err!(tcx.sess, invalidator.span, E0399,
1151 "the following trait items need to be reimplemented \
1152 as `{}` was overridden: `{}`",
1154 invalidated_items.iter()
1155 .map(|name| name.to_string())
1156 .collect::<Vec<_>>().join("`, `"))
1160 /// Checks a constant with a given type.
// Drives a complete, self-contained type-check over a constant's body
// expression: requires the declared type to be Sized, gathers locals,
// coerces the body to `expected_type`, applies inference fallbacks,
// analyzes closures, selects all obligations (reporting errors), then
// region-checks and writes the resolved types back.
// NOTE(review): gapped listing — the `id` parameter used below is
// declared on a line not shown here.
1161 fn check_const_with_type<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
1162 expr: &'tcx hir::Expr,
1163 expected_type: Ty<'tcx>,
1165 ccx.inherited(id).enter(|inh| {
1166 let fcx = FnCtxt::new(&inh, expected_type, expr.id);
// Constants must have a statically known size.
1167 fcx.require_type_is_sized(expected_type, expr.span, traits::ConstSized);
1169 // Gather locals in statics (because of block expressions).
1170 // This is technically unnecessary because locals in static items are forbidden,
1171 // but prevents type checking from blowing up before const checking can properly
1173 GatherLocalsVisitor { fcx: &fcx }.visit_expr(expr);
// Coercion (rather than strict equality) permits e.g. unsizing.
1175 fcx.check_expr_coercable_to_type(expr, expected_type);
// Resolve numeric fallbacks and as many obligations as possible before
// the final error-reporting selection pass.
1177 fcx.select_all_obligations_and_apply_defaults();
1178 fcx.closure_analyze_const(expr);
1179 fcx.select_obligations_where_possible();
1181 fcx.select_all_obligations_or_error();
// Region-check, then replace inference variables with final types.
1183 fcx.regionck_expr(expr);
1184 fcx.resolve_type_vars_in_expr(expr, id);
// Checks a constant item: looks up its declared type and delegates to
// `check_const_with_type`.
// NOTE(review): gapped listing — the `id` parameter line is not shown.
1188 fn check_const<'a, 'tcx>(ccx: &CrateCtxt<'a,'tcx>,
1189 expr: &'tcx hir::Expr,
1191 let decl_ty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(id)).ty;
1192 check_const_with_type(ccx, expr, decl_ty, id);
1195 /// Checks whether a type can be represented in memory. In particular, it
1196 /// identifies types that contain themselves without indirection through a
1197 /// pointer, which would mean their size is unbounded.
// Returns a bool (presumably false when the type is self-recursive —
// the gapped listing hides the return expressions; confirm against the
// full source).
1198 pub fn check_representable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
1200 item_id: ast::NodeId,
1201 _designation: &str) -> bool {
1202 let rty = tcx.node_id_to_type(item_id);
1204 // Check that it is possible to represent this type. This call identifies
1205 // (1) types that contain themselves and (2) types that contain a different
1206 // recursive type. It is only necessary to throw an error on those that
1207 // contain themselves. For case 2, there must be an inner type that will be
1208 // caught by case 1.
1209 match rty.is_representable(tcx, sp) {
// Self-recursive without indirection: emit the infinite-size error.
1210 Representability::SelfRecursive => {
1211 let item_def_id = tcx.map.local_def_id(item_id);
1212 tcx.recursive_type_with_infinite_size_error(item_def_id).emit();
// Merely *containing* a recursive type is fine here; the inner type
// will be flagged at its own definition site.
1215 Representability::Representable | Representability::ContainsRecursive => (),
// Validates a `#[repr(simd)]` struct: it must be a non-empty struct
// (E0075) whose fields are all of the same type (E0076), and that
// element type must be a machine type or a type parameter (E0077).
// NOTE(review): gapped listing — the `match t.sty` opener and several
// closing braces are on lines not shown here.
1220 pub fn check_simd<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span, id: ast::NodeId) {
1221 let t = tcx.node_id_to_type(id);
1223 ty::TyStruct(def, substs) => {
1224 let fields = &def.struct_variant().fields;
// E0075: a SIMD vector needs at least one lane.
1225 if fields.is_empty() {
1226 span_err!(tcx.sess, sp, E0075, "SIMD vector cannot be empty");
// All fields must share the type of the first field.
1229 let e = fields[0].ty(tcx, substs);
1230 if !fields.iter().all(|f| f.ty(tcx, substs) == e) {
1231 struct_span_err!(tcx.sess, sp, E0076, "SIMD vector should be homogeneous")
1232 .span_label(sp, &format!("SIMD elements must have the same type"))
// The element type must be a bare machine type; a generic parameter is
// also accepted (checked at monomorphization).
1237 ty::TyParam(_) => { /* struct<T>(T, T, T, T) is ok */ }
1238 _ if e.is_machine() => { /* struct(u8, u8, u8, u8) is ok */ }
1240 span_err!(tcx.sess, sp, E0077,
1241 "SIMD vector element type should be machine type");
// Checks an enum definition: rejects `#[repr]` hints on zero-variant
// enums (E0084), type-checks explicit discriminant expressions against
// the repr type, reports duplicate discriminant values (E0081), and
// finally checks representability.
1250 #[allow(trivial_numeric_casts)]
1251 pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
1253 vs: &'tcx [hir::Variant],
1255 let def_id = ccx.tcx.map.local_def_id(id);
// First repr hint wins; absence means ReprAny.
1256 let hint = *ccx.tcx.lookup_repr_hints(def_id).get(0).unwrap_or(&attr::ReprAny);
// E0084: an explicit representation makes no sense with no variants.
1258 if hint != attr::ReprAny && vs.is_empty() {
1260 ccx.tcx.sess, sp, E0084,
1261 "unsupported representation for zero-variant enum")
1262 .span_label(sp, &format!("unsupported enum representation"))
// Each explicit discriminant expression is a constant of the repr type.
1266 let repr_type_ty = ccx.tcx.enum_repr_type(Some(&hint)).to_ty(ccx.tcx);
1268 if let Some(ref e) = v.node.disr_expr {
1269 check_const_with_type(ccx, e, repr_type_ty, e.id);
1273 let def_id = ccx.tcx.map.local_def_id(id);
1275 let variants = &ccx.tcx.lookup_adt_def(def_id).variants;
// Walk variants in order, remembering every discriminant seen so far
// so duplicates can be reported with both spans.
1276 let mut disr_vals: Vec<ty::Disr> = Vec::new();
1277 for (v, variant) in vs.iter().zip(variants.iter()) {
1278 let current_disr_val = variant.disr_val;
1280 // Check for duplicate discriminant values
1281 if let Some(i) = disr_vals.iter().position(|&x| x == current_disr_val) {
// Point at the first occurrence's discriminant expression if it has
// one, otherwise at the variant itself; same for the duplicate.
1282 let variant_i_node_id = ccx.tcx.map.as_local_node_id(variants[i].did).unwrap();
1283 let variant_i = ccx.tcx.map.expect_variant(variant_i_node_id);
1284 let i_span = match variant_i.node.disr_expr {
1285 Some(ref expr) => expr.span,
1286 None => ccx.tcx.map.span(variant_i_node_id)
1288 let span = match v.node.disr_expr {
1289 Some(ref expr) => expr.span,
1292 struct_span_err!(ccx.tcx.sess, span, E0081,
1293 "discriminant value `{}` already exists", disr_vals[i])
1294 .span_label(i_span, &format!("first use of `{}`", disr_vals[i]))
1295 .span_label(span , &format!("enum already has `{}`", disr_vals[i]))
1298 disr_vals.push(current_disr_val);
// Finally, the enum must not contain itself without indirection.
1301 check_representable(ccx.tcx, sp, id, "enum");
// `AstConv` allows the type checker to convert AST types (`hir::Ty`)
// into semantic types (`Ty<'tcx>`) inside a function body. Unlike the
// collect-phase implementation, this one may create fresh inference
// variables (see `ty_infer` / `ty_infer_for_def`).
1304 impl<'a, 'gcx, 'tcx> AstConv<'gcx, 'tcx> for FnCtxt<'a, 'gcx, 'tcx> {
1305 fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
// Per-body cache of already-converted AST types.
1307 fn ast_ty_to_ty_cache(&self) -> &RefCell<NodeMap<Ty<'tcx>>> {
1308 &self.ast_ty_to_ty_cache
// Generics/type-scheme/trait-def lookups cannot fail inside a body:
// the collect pass has already populated the tables.
1311 fn get_generics(&self, _: Span, id: DefId)
1312 -> Result<&'tcx ty::Generics<'tcx>, ErrorReported>
1314 Ok(self.tcx().lookup_generics(id))
1317 fn get_item_type_scheme(&self, _: Span, id: DefId)
1318 -> Result<ty::TypeScheme<'tcx>, ErrorReported>
1320 Ok(self.tcx().lookup_item_type(id))
1323 fn get_trait_def(&self, _: Span, id: DefId)
1324 -> Result<&'tcx ty::TraitDef<'tcx>, ErrorReported>
1326 Ok(self.tcx().lookup_trait_def(id))
1329 fn ensure_super_predicates(&self, _: Span, _: DefId) -> Result<(), ErrorReported> {
1330 // all super predicates are ensured during collect pass
// Inside a body we have a concrete parameter environment, so free
// substitutions are available.
1334 fn get_free_substs(&self) -> Option<&Substs<'tcx>> {
1335 Some(&self.parameter_environment.free_substs)
// Bounds on a type parameter are read out of the caller-provided
// parameter environment by filtering trait predicates whose self type
// is that parameter.
1338 fn get_type_parameter_bounds(&self,
1340 node_id: ast::NodeId)
1341 -> Result<Vec<ty::PolyTraitRef<'tcx>>, ErrorReported>
1343 let def = self.tcx.type_parameter_def(node_id);
1344 let r = self.parameter_environment
1347 .filter_map(|predicate| {
1349 ty::Predicate::Trait(ref data) => {
1350 if data.0.self_ty().is_param(def.index) {
1351 Some(data.to_poly_trait_ref())
1365 fn trait_defines_associated_type_named(&self,
1366 trait_def_id: DefId,
1367 assoc_name: ast::Name)
1370 let trait_def = self.tcx().lookup_trait_def(trait_def_id);
1371 trait_def.associated_type_names.contains(&assoc_name)
// `_` in a body becomes a fresh inference variable (body elided in
// this listing).
1374 fn ty_infer(&self, _span: Span) -> Ty<'tcx> {
// Like `ty_infer`, but ties the fresh variable to a specific type
// parameter definition so its default can be applied later.
1378 fn ty_infer_for_def(&self,
1379 ty_param_def: &ty::TypeParameterDef<'tcx>,
1380 substs: &Substs<'tcx>,
1381 span: Span) -> Ty<'tcx> {
1382 self.type_var_for_def(span, ty_param_def, substs)
// Projections from a poly trait ref: replace the late-bound regions
// with fresh variables first, then normalize the projection.
1385 fn projected_ty_from_poly_trait_ref(&self,
1387 poly_trait_ref: ty::PolyTraitRef<'tcx>,
1388 item_name: ast::Name)
1391 let (trait_ref, _) =
1392 self.replace_late_bound_regions_with_fresh_var(
1394 infer::LateBoundRegionConversionTime::AssocTypeProjection(item_name),
1397 self.normalize_associated_type(span, trait_ref, item_name)
1400 fn projected_ty(&self,
1402 trait_ref: ty::TraitRef<'tcx>,
1403 item_name: ast::Name)
1406 self.normalize_associated_type(span, trait_ref, item_name)
// Propagate "an error already happened" into the inference context so
// downstream diagnostics can be suppressed.
1409 fn set_tainted_by_errors(&self) {
1410 self.infcx.set_tainted_by_errors()
// `RegionScope` tells AST-type conversion what to do with elided
// lifetimes. Inside a function body there is no fixed default region:
// every elided lifetime becomes a fresh inference variable.
1414 impl<'a, 'gcx, 'tcx> RegionScope for FnCtxt<'a, 'gcx, 'tcx> {
1415 fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
1416 Some(self.base_object_lifetime_default(span))
1419 fn base_object_lifetime_default(&self, span: Span) -> ty::Region {
1420 // RFC #599 specifies that object lifetime defaults take
1421 // precedence over other defaults. But within a fn body we
1422 // don't have a *default* region, rather we use inference to
1423 // find the *correct* region, which is strictly more general
1424 // (and anyway, within a fn body the right region may not even
1425 // be something the user can write explicitly, since it might
1426 // be some expression).
1427 self.next_region_var(infer::MiscVariable(span))
// Elision of N lifetimes never fails in a body: just mint N fresh
// region variables.
1430 fn anon_regions(&self, span: Span, count: usize)
1431 -> Result<Vec<ty::Region>, Option<Vec<ElisionFailureInfo>>> {
1432 Ok((0..count).map(|_| {
1433 self.next_region_var(infer::MiscVariable(span))
1438 /// Controls whether the arguments are tupled. This is used for the call
1441 /// Tupling means that all call-side arguments are packed into a tuple and
1442 /// passed as a single parameter. For example, if tupling is enabled, this
1445 /// fn f(x: (isize, isize))
1447 /// Can be called as:
// NOTE(review): gapped listing — the enum's variants (and the rest of
// the doc example) are on lines not shown here.
1454 #[derive(Clone, Eq, PartialEq)]
1455 enum TupleArgumentsFlag {
// Inherent methods of `FnCtxt`, the per-body type-checking context.
// (This impl block extends beyond the end of this chunk.)
1460 impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
// Constructs a fresh `FnCtxt` over the shared `Inherited` tables.
// NOTE(review): gapped listing — the return-type parameter and several
// struct-literal fields are on lines not shown here.
1461 pub fn new(inh: &'a Inherited<'a, 'gcx, 'tcx>,
1463 body_id: ast::NodeId)
1464 -> FnCtxt<'a, 'gcx, 'tcx> {
1466 ast_ty_to_ty_cache: RefCell::new(NodeMap()),
1468 writeback_errors: Cell::new(false),
// Snapshot the session error count so `err_count_since_creation`
// can tell whether errors were reported during *this* body's check.
1469 err_count_on_creation: inh.tcx.sess.err_count(),
1471 ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal, 0)),
// Accessor for the parameter environment of the item being checked.
1476 pub fn param_env(&self) -> &ty::ParameterEnvironment<'tcx> {
1477 &self.parameter_environment
// Accessor for the compiler session (body elided in this listing).
1480 pub fn sess(&self) -> &Session {
// Number of errors reported since this `FnCtxt` was created; used to
// decide whether a missing node type is just fallout from earlier
// errors rather than an internal bug.
1484 pub fn err_count_since_creation(&self) -> usize {
1485 self.tcx.sess.err_count() - self.err_count_on_creation
1488 /// Resolves type variables in `ty` if possible. Unlike the infcx
1489 /// version (resolve_type_vars_if_possible), this version will
1490 /// also select obligations if it seems useful, in an effort
1491 /// to get more type information.
1492 fn resolve_type_vars_with_obligations(&self, mut ty: Ty<'tcx>) -> Ty<'tcx> {
1493 debug!("resolve_type_vars_with_obligations(ty={:?})", ty);
1495 // No TyInfer()? Nothing needs doing.
1496 if !ty.has_infer_types() {
1497 debug!("resolve_type_vars_with_obligations: ty={:?}", ty)
1501 // If `ty` is a type variable, see whether we already know what it is.
1502 ty = self.resolve_type_vars_if_possible(&ty);
1503 if !ty.has_infer_types() {
1504 debug!("resolve_type_vars_with_obligations: ty={:?}", ty);
1508 // If not, try resolving pending obligations as much as
1509 // possible. This can help substantially when there are
1510 // indirect dependencies that don't seem worth tracking
// Last resort: make progress on pending obligations, then resolve
// again; the result may still contain inference variables.
1512 self.select_obligations_where_possible();
1513 ty = self.resolve_type_vars_if_possible(&ty);
1515 debug!("resolve_type_vars_with_obligations: ty={:?}", ty);
// Queues a call-resolution step that must wait until the given
// closure's signature has been inferred.
1519 fn record_deferred_call_resolution(&self,
1520 closure_def_id: DefId,
1521 r: DeferredCallResolutionHandler<'gcx, 'tcx>) {
1522 let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
1523 deferred_call_resolutions.entry(closure_def_id).or_insert(vec![]).push(r);
// Takes (and clears) all deferred resolutions recorded for a closure;
// returns an empty vec if none were recorded.
1526 fn remove_deferred_call_resolutions(&self,
1527 closure_def_id: DefId)
1528 -> Vec<DeferredCallResolutionHandler<'gcx, 'tcx>>
1530 let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
1531 deferred_call_resolutions.remove(&closure_def_id).unwrap_or(Vec::new())
// A unique, human-readable tag for this context (its address), used
// only in debug logging.
1534 pub fn tag(&self) -> String {
1535 let self_ptr: *const FnCtxt = self;
1536 format!("{:?}", self_ptr)
// Looks up the type recorded for local variable `nid`; reports E0513
// if none was gathered (error-recovery path; the success arm is on
// lines not shown in this listing).
1539 pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> Ty<'tcx> {
1540 match self.locals.borrow().get(&nid) {
1543 span_err!(self.tcx.sess, span, E0513,
1544 "no type for local variable {}",
// Records the final type of an AST node in the side tables.
1552 pub fn write_ty(&self, node_id: ast::NodeId, ty: Ty<'tcx>) {
1553 debug!("write_ty({}, {:?}) in fcx {}",
1554 node_id, ty, self.tag());
1555 self.tables.borrow_mut().node_types.insert(node_id, ty);
1557 // Add adjustments to !-expressions
// When an expression's type is written, a never-to-any adjustment with
// a fresh diverging type variable is attached (condition on the type
// being `!` is presumably on an elided line — confirm in full source).
1559 if let Some(hir::map::NodeExpr(_)) = self.tcx.map.find(node_id) {
1560 let adj = adjustment::AdjustNeverToAny(self.next_diverging_ty_var());
1561 self.write_adjustment(node_id, adj);
// Records the substitutions used at a node, skipping identity substs
// to keep the table small.
1566 pub fn write_substs(&self, node_id: ast::NodeId, substs: ty::ItemSubsts<'tcx>) {
1567 if !substs.substs.is_noop() {
1568 debug!("write_substs({}, {:?}) in fcx {}",
1573 self.tables.borrow_mut().item_substs.insert(node_id, substs);
// Convenience wrapper recording an autoderef adjustment for a node
// (derefs count elided in this listing).
1577 pub fn write_autoderef_adjustment(&self,
1578 node_id: ast::NodeId,
1580 self.write_adjustment(
1582 adjustment::AdjustDerefRef(adjustment::AutoDerefRef {
// Records an adjustment, dropping identity adjustments.
1590 pub fn write_adjustment(&self,
1591 node_id: ast::NodeId,
1592 adj: adjustment::AutoAdjustment<'tcx>) {
1593 debug!("write_adjustment(node_id={}, adj={:?})", node_id, adj);
1595 if adj.is_identity() {
1599 self.tables.borrow_mut().adjustments.insert(node_id, adj);
1602 /// Basically whenever we are converting from a type scheme into
1603 /// the fn body space, we always want to normalize associated
1604 /// types as well. This function combines the two.
// Substitutes `substs` into `value`, then normalizes any associated
// types that the substitution exposed.
1605 fn instantiate_type_scheme<T>(&self,
1607 substs: &Substs<'tcx>,
1610 where T : TypeFoldable<'tcx>
1612 let value = value.subst(self.tcx, substs);
1613 let result = self.normalize_associated_types_in(span, &value);
1614 debug!("instantiate_type_scheme(value={:?}, substs={:?}) = {:?}",
1621 /// As `instantiate_type_scheme`, but for the bounds found in a
1622 /// generic type scheme.
1623 fn instantiate_bounds(&self,
1625 substs: &Substs<'tcx>,
1626 bounds: &ty::GenericPredicates<'tcx>)
1627 -> ty::InstantiatedPredicates<'tcx>
1629 let result = bounds.instantiate(self.tcx, substs);
// Normalize the instantiated predicate list before returning it.
1630 let result = self.normalize_associated_types_in(span, &result.predicates);
1631 debug!("instantiate_bounds(bounds={:?}, substs={:?}) = {:?}",
1635 ty::InstantiatedPredicates {
1640 /// Replace all anonymized types with fresh inference variables
1641 /// and record them for writeback.
1642 fn instantiate_anon_types<T: TypeFoldable<'tcx>>(&self, value: &T) -> T {
1643 value.fold_with(&mut BottomUpFolder { tcx: self.tcx, fldop: |ty| {
1644 if let ty::TyAnon(def_id, substs) = ty.sty {
1645 // Use the same type variable if the exact same TyAnon appears more
1646 // than once in the return type (e.g. if it's pased to a type alias).
1647 if let Some(ty_var) = self.anon_types.borrow().get(&def_id) {
// First time this `impl Trait` is seen: mint a fresh variable and
// remember it so later occurrences reuse it.
1650 let ty_var = self.next_ty_var();
1651 self.anon_types.borrow_mut().insert(def_id, ty_var);
// Instantiate the bounds declared on the anonymized type with the
// current substitutions ...
1653 let item_predicates = self.tcx.lookup_predicates(def_id);
1654 let bounds = item_predicates.instantiate(self.tcx, substs);
1656 let span = self.tcx.map.def_id_span(def_id, codemap::DUMMY_SP);
1657 for predicate in bounds.predicates {
1658 // Change the predicate to refer to the type variable,
1659 // which will be the concrete type, instead of the TyAnon.
1660 // This also instantiates nested `impl Trait`.
1661 let predicate = self.instantiate_anon_types(&predicate);
1663 // Require that the predicate holds for the concrete type.
1664 let cause = traits::ObligationCause::new(span, self.body_id,
1665 traits::ReturnType);
1666 self.register_predicate(traits::Obligation::new(cause, predicate));
// Delegates per-body normalization to the shared `Inherited` context.
1676 fn normalize_associated_types_in<T>(&self, span: Span, value: &T) -> T
1677 where T : TypeFoldable<'tcx>
1679 self.inh.normalize_associated_types_in(span, self.body_id, value)
// Normalizes a single projection `<trait_ref>::item_name`, registering
// any obligations the normalization produces (the fulfillment-context
// plumbing around this call is on elided lines).
1682 fn normalize_associated_type(&self,
1684 trait_ref: ty::TraitRef<'tcx>,
1685 item_name: ast::Name)
1688 let cause = traits::ObligationCause::new(span,
1690 traits::ObligationCauseCode::MiscObligation);
1693 .normalize_projection_type(self,
1695 trait_ref: trait_ref,
1696 item_name: item_name,
1701 /// Instantiates the type in `did` with the generics in `path` and returns
1702 /// it (registering the necessary trait obligations along the way).
1704 /// Note that this function is only intended to be used with type-paths,
1705 /// not with value-paths.
1706 pub fn instantiate_type_path(&self,
1709 node_id: ast::NodeId)
1711 debug!("instantiate_type_path(did={:?}, path={:?})", did, path);
1712 let mut ty = self.tcx.lookup_item_type(did).ty;
1714 // Tuple variants have fn type even in type namespace, extract true variant type from it
1715 ty = self.tcx.no_late_bound_regions(&ty.fn_ret()).unwrap();
// Convert the path's explicit type arguments into substitutions.
1717 let type_predicates = self.tcx.lookup_predicates(did);
1718 let substs = AstConv::ast_path_substs_for_ty(self, self,
1720 PathParamMode::Optional,
1722 path.segments.last().unwrap());
1723 debug!("instantiate_type_path: ty={:?} substs={:?}", ty, substs);
// Register the item's where-clauses as obligations for these substs.
1724 let bounds = self.instantiate_bounds(path.span, substs, &type_predicates);
1725 let cause = traits::ObligationCause::new(path.span, self.body_id,
1726 traits::ItemObligation(did));
1727 self.add_obligations_for_parameters(cause, &bounds);
// Record the instantiated type and substitutions for this node.
1729 let ty_substituted = self.instantiate_type_scheme(path.span, substs, &ty);
1730 self.write_ty(node_id, ty_substituted);
1731 self.write_substs(node_id, ty::ItemSubsts {
// Shorthands for recording the common "fixed" types at a node.
1737 pub fn write_nil(&self, node_id: ast::NodeId) {
1738 self.write_ty(node_id, self.tcx.mk_nil());
1741 pub fn write_never(&self, node_id: ast::NodeId) {
1742 self.write_ty(node_id, self.tcx.types.never);
1745 pub fn write_error(&self, node_id: ast::NodeId) {
1746 self.write_ty(node_id, self.tcx.types.err);
// Registers an obligation that `ty` satisfies the given builtin bound,
// recording `code` as the reason for diagnostics.
1749 pub fn require_type_meets(&self,
1752 code: traits::ObligationCauseCode<'tcx>,
1753 bound: ty::BuiltinBound)
1755 self.register_builtin_bound(
1758 traits::ObligationCause::new(span, self.body_id, code));
// Convenience wrappers around `require_type_meets` for `Sized`.
1761 pub fn require_type_is_sized(&self,
1764 code: traits::ObligationCauseCode<'tcx>)
1766 self.require_type_meets(ty, span, code, ty::BoundSized);
1769 pub fn require_expr_have_sized_type(&self,
1771 code: traits::ObligationCauseCode<'tcx>)
1773 self.require_type_is_sized(self.expr_ty(expr), expr.span, code);
// Low-level registration of a builtin-bound obligation with the
// fulfillment context.
1776 pub fn register_builtin_bound(&self,
1778 builtin_bound: ty::BuiltinBound,
1779 cause: traits::ObligationCause<'tcx>)
1781 self.fulfillment_cx.borrow_mut()
1782 .register_builtin_bound(self, ty, builtin_bound, cause);
// Registers an arbitrary predicate obligation for later selection.
1785 pub fn register_predicate(&self,
1786 obligation: traits::PredicateObligation<'tcx>)
1788 debug!("register_predicate({:?})",
1792 .register_predicate_obligation(self, obligation);
// Converts an AST type to a semantic type and immediately requires it
// to be well-formed.
1795 pub fn to_ty(&self, ast_t: &hir::Ty) -> Ty<'tcx> {
1796 let t = AstConv::ast_ty_to_ty(self, self, ast_t);
1797 self.register_wf_obligation(t, ast_t.span, traits::MiscObligation);
// Returns the (post-adjustment, for never-to-any) type recorded for an
// expression; it is a bug to ask for a type that was never written
// unless errors already occurred (the error arms are partially elided).
1801 pub fn expr_ty(&self, ex: &hir::Expr) -> Ty<'tcx> {
// A `!`-expression adjusted to "any" reports the adjustment's target
// type instead of `!` itself.
1802 if let Some(&adjustment::AdjustNeverToAny(ref t))
1803 = self.tables.borrow().adjustments.get(&ex.id) {
1806 match self.tables.borrow().node_types.get(&ex.id) {
1809 bug!("no type for expr in fcx {}", self.tag());
1814 /// Apply `adjustment` to the type of `expr`
1815 pub fn adjust_expr_ty(&self,
1817 adjustment: Option<&adjustment::AutoAdjustment<'tcx>>)
1820 let raw_ty = self.expr_ty(expr);
1821 let raw_ty = self.shallow_resolve(raw_ty);
1822 let resolve_ty = |ty: Ty<'tcx>| self.resolve_type_vars_if_possible(&ty);
// The adjustment machinery may need method types (e.g. for overloaded
// deref); supply them from the method map, resolved.
1823 raw_ty.adjust(self.tcx, expr.span, expr.id, adjustment, |method_call| {
1824 self.tables.borrow().method_map.get(&method_call)
1825 .map(|method| resolve_ty(method.ty))
// Like `expr_ty` but keyed by node id; after earlier errors a missing
// entry degrades to `err` instead of ICE-ing.
1829 pub fn node_ty(&self, id: ast::NodeId) -> Ty<'tcx> {
1830 match self.tables.borrow().node_types.get(&id) {
1832 None if self.err_count_since_creation() != 0 => self.tcx.types.err,
1834 bug!("no type for node {}: {} in fcx {}",
1835 id, self.tcx.map.node_to_string(id),
// Borrow of the item-substs table, projected through `Ref::map` so the
// `RefCell` borrow stays alive for the caller.
1841 pub fn item_substs(&self) -> Ref<NodeMap<ty::ItemSubsts<'tcx>>> {
1842 // NOTE: @jroesch this is hack that appears to be fixed on nightly, will monitor if
1843 // it changes when we upgrade the snapshot compiler
1844 fn project_item_susbts<'a, 'tcx>(tables: &'a ty::Tables<'tcx>)
1845 -> &'a NodeMap<ty::ItemSubsts<'tcx>> {
1849 Ref::map(self.tables.borrow(), project_item_susbts)
// Invokes `f` with the substitutions recorded at `id`, if any.
1852 pub fn opt_node_ty_substs<F>(&self,
1855 F: FnOnce(&ty::ItemSubsts<'tcx>),
1857 match self.tables.borrow().item_substs.get(&id) {
1863 /// Registers an obligation for checking later, during regionck, that the type `ty` must
1864 /// outlive the region `r`.
1865 pub fn register_region_obligation(&self,
1868 cause: traits::ObligationCause<'tcx>)
1870 let mut fulfillment_cx = self.fulfillment_cx.borrow_mut();
1871 fulfillment_cx.register_region_obligation(ty, region, cause);
1874 /// Registers an obligation for checking later, during regionck, that the type `ty` must
1875 /// outlive the region `r`.
1876 pub fn register_wf_obligation(&self,
1879 code: traits::ObligationCauseCode<'tcx>)
1881 // WF obligations never themselves fail, so no real need to give a detailed cause:
1882 let cause = traits::ObligationCause::new(span, self.body_id, code);
1883 self.register_predicate(traits::Obligation::new(cause, ty::Predicate::WellFormed(ty)));
1886 pub fn register_old_wf_obligation(&self,
1889 code: traits::ObligationCauseCode<'tcx>)
1891 // Registers an "old-style" WF obligation that uses the
1892 // implicator code. This is basically a buggy version of
1893 // `register_wf_obligation` that is being kept around
1894 // temporarily just to help with phasing in the newer rules.
1896 // FIXME(#27579) all uses of this should be migrated to register_wf_obligation eventually
1897 let cause = traits::ObligationCause::new(span, self.body_id, code);
// `ReEmpty` is used as the region here; see the FIXME above — this
// path is known to be approximate.
1898 self.register_region_obligation(ty, ty::ReEmpty, cause);
1901 /// Registers obligations that all types appearing in `substs` are well-formed.
1902 pub fn add_wf_bounds(&self, substs: &Substs<'tcx>, expr: &hir::Expr)
1904 for &ty in &substs.types {
1905 self.register_wf_obligation(ty, expr.span, traits::MiscObligation);
1909 /// Given a fully substituted set of bounds (`generic_bounds`), and the values with which each
1910 /// type/region parameter was instantiated (`substs`), creates and registers suitable
1911 /// trait/region obligations.
1913 /// For example, if there is a function:
1916 /// fn foo<'a,T:'a>(...)
1919 /// and a reference:
1925 /// Then we will create a fresh region variable `'$0` and a fresh type variable `$1` for `'a`
1926 /// and `T`. This routine will add a region obligation `$1:'$0` and register it locally.
1927 pub fn add_obligations_for_parameters(&self,
1928 cause: traits::ObligationCause<'tcx>,
1929 predicates: &ty::InstantiatedPredicates<'tcx>)
// Escaping (late-bound) regions would make the obligations meaningless
// here; that is a caller bug.
1931 assert!(!predicates.has_escaping_regions());
1933 debug!("add_obligations_for_parameters(predicates={:?})",
1936 for obligation in traits::predicates_for_generics(cause, predicates) {
1937 self.register_predicate(obligation);
1941 // FIXME(arielb1): use this instead of field.ty everywhere
1942 // Only for fields! Returns <none> for methods>
1943 // Indifferent to privacy flags
// Computes a struct/enum field's type under `substs`, normalizing any
// associated types the substitution exposed.
1944 pub fn field_ty(&self,
1946 field: ty::FieldDef<'tcx>,
1947 substs: &Substs<'tcx>)
1950 self.normalize_associated_types_in(span,
1951 &field.ty(self.tcx, substs))
// Runs all cast checks that were deferred until enough type
// information was available (per-cast call elided in this listing).
1954 fn check_casts(&self) {
1955 let mut deferred_cast_checks = self.deferred_cast_checks.borrow_mut();
1956 for cast in deferred_cast_checks.drain(..) {
1961 /// Apply "fallbacks" to some types
1962 /// ! gets replaced with (), unconstrained ints with i32, and unconstrained floats with f64.
1963 fn default_type_parameters(&self) {
1964 use rustc::ty::error::UnconstrainedNumeric::Neither;
1965 use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1967 // Defaulting inference variables becomes very dubious if we have
1968 // encountered type-checking errors. Therefore, if we think we saw
1969 // some errors in this function, just resolve all uninstanted type
1970 // varibles to TyError.
1971 if self.is_tainted_by_errors() {
1972 for ty in &self.unsolved_variables() {
1973 if let ty::TyInfer(_) = self.shallow_resolve(ty).sty {
1974 debug!("default_type_parameters: defaulting `{:?}` to error", ty);
1975 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx().types.err);
// Normal path: diverging variables default to the diverging default,
// unconstrained integer literals to i32, unconstrained floats to f64.
1981 for ty in &self.unsolved_variables() {
1982 let resolved = self.resolve_type_vars_if_possible(ty);
1983 if self.type_var_diverges(resolved) {
1984 debug!("default_type_parameters: defaulting `{:?}` to `!` because it diverges",
1986 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
1987 self.tcx.mk_diverging_default());
1989 match self.type_is_unconstrained_numeric(resolved) {
1990 UnconstrainedInt => {
1991 debug!("default_type_parameters: defaulting `{:?}` to `i32`",
1993 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
1995 UnconstrainedFloat => {
// NOTE(review): the debug message below says `f32`, but the code
// defaults to `self.tcx.types.f64` (matching the doc comment above).
// The message text looks stale; a doc-only pass cannot change a
// runtime string — fix the literal to `f64` in a code change.
1996 debug!("default_type_parameters: defaulting `{:?}` to `f32`",
1998 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
// Dispatches to the new fallback algorithm when the
// `default_type_parameter_fallback` feature gate is enabled, otherwise
// to the old one.
2006 fn select_all_obligations_and_apply_defaults(&self) {
2007 if self.tcx.sess.features.borrow().default_type_parameter_fallback {
2008 self.new_select_all_obligations_and_apply_defaults();
2010 self.old_select_all_obligations_and_apply_defaults();
2014 // Implements old type inference fallback algorithm
// Select, apply numeric/diverging defaults once, select again.
2015 fn old_select_all_obligations_and_apply_defaults(&self) {
2016 self.select_obligations_where_possible();
2017 self.default_type_parameters();
2018 self.select_obligations_where_possible();
// New (feature-gated) fallback algorithm: iteratively selects
// obligations and applies user-declared type-parameter defaults as
// well as numeric/diverging fallbacks, detecting and reporting
// conflicting defaults. Bounded by the recursion limit because each
// round of defaulting can unlock further obligation progress.
2021 fn new_select_all_obligations_and_apply_defaults(&self) {
2022 use rustc::ty::error::UnconstrainedNumeric::Neither;
2023 use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
2025 // For the time being this errs on the side of being memory wasteful but provides better
2027 // let type_variables = self.type_variables.clone();
2029 // There is a possibility that this algorithm will have to run an arbitrary number of times
2030 // to terminate so we bound it by the compiler's recursion limit.
2031 for _ in 0..self.tcx.sess.recursion_limit.get() {
2032 // First we try to solve all obligations, it is possible that the last iteration
2033 // has made it possible to make more progress.
2034 self.select_obligations_where_possible();
2036 let mut conflicts = Vec::new();
2038 // Collect all unsolved type, integral and floating point variables.
2039 let unsolved_variables = self.unsolved_variables();
2041 // We must collect the defaults *before* we do any unification. Because we have
2042 // directly attached defaults to the type variables any unification that occurs
2043 // will erase defaults causing conflicting defaults to be completely ignored.
2044 let default_map: FnvHashMap<_, _> =
2047 .filter_map(|t| self.default(t).map(|d| (t, d)))
2050 let mut unbound_tyvars = HashSet::new();
2052 debug!("select_all_obligations_and_apply_defaults: defaults={:?}", default_map);
2054 // We loop over the unsolved variables, resolving them and if they are
2055 // and unconstrainted numeric type we add them to the set of unbound
2056 // variables. We do this so we only apply literal fallback to type
2057 // variables without defaults.
2058 for ty in &unsolved_variables {
2059 let resolved = self.resolve_type_vars_if_possible(ty);
2060 if self.type_var_diverges(resolved) {
2061 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
2062 self.tcx.mk_diverging_default());
2064 match self.type_is_unconstrained_numeric(resolved) {
2065 UnconstrainedInt | UnconstrainedFloat => {
2066 unbound_tyvars.insert(resolved);
2073 // We now remove any numeric types that also have defaults, and instead insert
2074 // the type variable with a defined fallback.
2075 for ty in &unsolved_variables {
2076 if let Some(_default) = default_map.get(ty) {
2077 let resolved = self.resolve_type_vars_if_possible(ty);
2079 debug!("select_all_obligations_and_apply_defaults: \
2080 ty: {:?} with default: {:?}",
2083 match resolved.sty {
2084 ty::TyInfer(ty::TyVar(_)) => {
2085 unbound_tyvars.insert(ty);
// A numeric variable with a declared default: prefer the declared
// default, replacing the literal-fallback entry inserted above.
2088 ty::TyInfer(ty::IntVar(_)) | ty::TyInfer(ty::FloatVar(_)) => {
2089 unbound_tyvars.insert(ty);
2090 if unbound_tyvars.contains(resolved) {
2091 unbound_tyvars.remove(resolved);
2100 // If there are no more fallbacks to apply at this point we have applied all possible
2101 // defaults and type inference will proceed as normal.
2102 if unbound_tyvars.is_empty() {
2106 // Finally we go through each of the unbound type variables and unify them with
2107 // the proper fallback, reporting a conflicting default error if any of the
2108 // unifications fail. We know it must be a conflicting default because the
2109 // variable would only be in `unbound_tyvars` and have a concrete value if
2110 // it had been solved by previously applying a default.
2112 // We wrap this in a transaction for error reporting, if we detect a conflict
2113 // we will rollback the inference context to its prior state so we can probe
2114 // for conflicts and correctly report them.
2117 let _ = self.commit_if_ok(|_: &infer::CombinedSnapshot| {
2118 for ty in &unbound_tyvars {
2119 if self.type_var_diverges(ty) {
2120 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
2121 self.tcx.mk_diverging_default());
2123 match self.type_is_unconstrained_numeric(ty) {
2124 UnconstrainedInt => {
2125 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
2127 UnconstrainedFloat => {
2128 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
// Apply a user-declared default by equating the variable with the
// default's type; failure indicates conflicting defaults.
2131 if let Some(default) = default_map.get(ty) {
2132 let default = default.clone();
2133 match self.eq_types(false,
2134 TypeOrigin::Misc(default.origin_span),
2136 Ok(InferOk { obligations, .. }) => {
2137 // FIXME(#32730) propagate obligations
2138 assert!(obligations.is_empty())
2141 conflicts.push((*ty, default));
2150 // If there are conflicts we rollback, otherwise commit
2151 if conflicts.len() > 0 {
2158 if conflicts.len() > 0 {
2159 // Loop through each conflicting default, figuring out the default that caused
2160 // a unification failure and then report an error for each.
2161 for (conflict, default) in conflicts {
2162 let conflicting_default =
2163 self.find_conflicting_default(&unbound_tyvars, &default_map, conflict)
2164 .unwrap_or(type_variable::Default {
2165 ty: self.next_ty_var(),
2166 origin_span: syntax_pos::DUMMY_SP,
2167 def_id: self.tcx.map.local_def_id(0) // what do I put here?
2170 // This is to ensure that we elimnate any non-determinism from the error
2171 // reporting by fixing an order, it doesn't matter what order we choose
2172 // just that it is consistent.
2173 let (first_default, second_default) =
2174 if default.def_id < conflicting_default.def_id {
2175 (default, conflicting_default)
2177 (conflicting_default, default)
2181 self.report_conflicting_default_types(
2182 first_default.origin_span,
// One more selection pass after all defaults have been applied.
2189 self.select_obligations_where_possible();
2192 // For use in error handling related to default type parameter fallback. We explicitly
2193 // apply the default that caused conflict first to a local version of the type variable
2194 // table then apply defaults until we find a conflict. That default must be the one
2195 // that caused conflict earlier.
// Returns the default whose application makes unification of `conflict` fail,
// or `None` if no other default can be blamed for the failure.
// NOTE(review): interior lines of this function are elided in this excerpt
// (e.g. the `conflict` parameter line and the trailing arms/braces).
2196 fn find_conflicting_default(&self,
2197 unbound_vars: &HashSet<Ty<'tcx>>,
2198 default_map: &FnvHashMap<&Ty<'tcx>, type_variable::Default<'tcx>>,
2200 -> Option<type_variable::Default<'tcx>> {
2201 use rustc::ty::error::UnconstrainedNumeric::Neither;
2202 use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
2204 // Ensure that we apply the conflicting default first
2205 let mut unbound_tyvars = Vec::with_capacity(unbound_vars.len() + 1);
2206 unbound_tyvars.push(conflict);
2207 unbound_tyvars.extend(unbound_vars.iter());
2209 let mut result = None;
2210 // We run the same code as above applying defaults in order, this time when
2211 // we find the conflict we just return it for error reporting above.
2213 // We also run this inside snapshot that never commits so we can do error
2214 // reporting for more then one conflict.
2215 for ty in &unbound_tyvars {
2216 if self.type_var_diverges(ty) {
// Diverging type variables fall back to the diverging default (`!`/`()`).
2217 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty,
2218 self.tcx.mk_diverging_default());
2220 match self.type_is_unconstrained_numeric(ty) {
2221 UnconstrainedInt => {
// Unconstrained integer literals default to i32.
2222 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
2224 UnconstrainedFloat => {
// Unconstrained float literals default to f64.
2225 self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
2228 if let Some(default) = default_map.get(ty) {
2229 let default = default.clone();
2230 match self.eq_types(false,
2231 TypeOrigin::Misc(default.origin_span),
2233 // FIXME(#32730) propagate obligations
2234 Ok(InferOk { obligations, .. }) => assert!(obligations.is_empty()),
// The first default whose unification fails is the culprit.
2236 result = Some(default);
// Resolve every remaining trait obligation at the end of type-checking a body,
// reporting hard errors for obligations that cannot be satisfied and warnings
// for RFC 1592 obligations.
2248 fn select_all_obligations_or_error(&self) {
2249 debug!("select_all_obligations_or_error");
2251 // upvar inference should have ensured that all deferred call
2252 // resolutions are handled by now.
2253 assert!(self.deferred_call_resolutions.borrow().is_empty());
// Apply numeric/user defaults before final selection so fallback can
// constrain otherwise-unresolved type variables.
2255 self.select_all_obligations_and_apply_defaults();
2257 let mut fulfillment_cx = self.fulfillment_cx.borrow_mut();
2259 // Steal the deferred obligations before the fulfillment
2260 // context can turn all of them into errors.
2261 let obligations = fulfillment_cx.take_deferred_obligations();
2262 self.deferred_obligations.borrow_mut().extend(obligations);
2264 match fulfillment_cx.select_all_or_error(self) {
2266 Err(errors) => { self.report_fulfillment_errors(&errors); }
// RFC 1592 violations are reported as warnings rather than errors.
2269 if let Err(ref errors) = fulfillment_cx.select_rfc1592_obligations(self) {
2270 self.report_fulfillment_errors_as_warnings(errors, self.body_id);
2274 /// Select as many obligations as we can at present.
// Unlike `select_all_obligations_or_error`, leftover ambiguous obligations
// are not an error here; only genuine failures are reported.
2275 fn select_obligations_where_possible(&self) {
2276 match self.fulfillment_cx.borrow_mut().select_where_possible(self) {
2278 Err(errors) => { self.report_fulfillment_errors(&errors); }
2282 /// For the overloaded lvalue expressions (`*x`, `x[3]`), the trait
2283 /// returns a type of `&T`, but the actual type we assign to the
2284 /// *expression* is `T`. So this function just peels off the return
2285 /// type by one layer to yield `T`.
// Panics (via `unwrap`) if the method's return type still has late-bound
// regions or is not a reference — both are invariants established by
// method lookup for the `Deref`/`Index` lang-item traits.
2286 fn make_overloaded_lvalue_return_type(&self,
2287 method: MethodCallee<'tcx>)
2288 -> ty::TypeAndMut<'tcx>
2290 // extract method return type, which will be &T;
2291 // all LB regions should have been instantiated during method lookup
2292 let ret_ty = method.ty.fn_ret();
2293 let ret_ty = self.tcx.no_late_bound_regions(&ret_ty).unwrap();
2295 // method returns &T, but the type as visible to user is T, so deref
2296 ret_ty.builtin_deref(true, NoPreference).unwrap()
// Autoderef loop for `base_expr[idx]`: repeatedly dereference the base type,
// trying `try_index_step` at each level; as a last resort, unsize a fixed
// array `[T; N]` to a slice `[T]` and try once more.
// Returns `(index type, element type)` on success.
2299 fn lookup_indexing(&self,
2301 base_expr: &'gcx hir::Expr,
2304 lvalue_pref: LvaluePreference)
2305 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2307 // FIXME(#18741) -- this is almost but not quite the same as the
2308 // autoderef that normal method probing does. They could likely be
2311 let mut autoderef = self.autoderef(base_expr.span, base_ty);
2313 while let Some((adj_ty, autoderefs)) = autoderef.next() {
2314 if let Some(final_mt) = self.try_index_step(
2315 MethodCall::expr(expr.id),
2316 expr, base_expr, adj_ty, autoderefs,
2317 false, lvalue_pref, idx_ty)
2319 autoderef.finalize(lvalue_pref, Some(base_expr));
2320 return Some(final_mt);
// If indexing failed but we reached a fixed-size array, retry after
// unsizing `[T; N]` to `[T]` (the `true` below marks the unsize step).
2323 if let ty::TyArray(element_ty, _) = adj_ty.sty {
2324 autoderef.finalize(lvalue_pref, Some(base_expr));
2325 let adjusted_ty = self.tcx.mk_slice(element_ty);
2326 return self.try_index_step(
2327 MethodCall::expr(expr.id), expr, base_expr,
2328 adjusted_ty, autoderefs, true, lvalue_pref, idx_ty);
2331 autoderef.unambiguous_final_ty();
2335 /// To type-check `base_expr[index_expr]`, we progressively autoderef
2336 /// (and otherwise adjust) `base_expr`, looking for a type which either
2337 /// supports builtin indexing or overloaded indexing.
2338 /// This loop implements one step in that search; the autoderef loop
2339 /// is implemented by `lookup_indexing`.
// On success returns `(index type, element type)`; tries built-in `usize`
// indexing first, then the `IndexMut`/`Index` lang-item traits.
2340 fn try_index_step(&self,
2341 method_call: MethodCall,
2343 base_expr: &'gcx hir::Expr,
2344 adjusted_ty: Ty<'tcx>,
2347 lvalue_pref: LvaluePreference,
2349 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2352 debug!("try_index_step(expr={:?}, base_expr.id={:?}, adjusted_ty={:?}, \
2353 autoderefs={}, unsize={}, index_ty={:?})",
// Fresh variable for the index operand type; unified via trait lookup below.
2361 let input_ty = self.next_ty_var();
2363 // First, try built-in indexing.
2364 match (adjusted_ty.builtin_index(), &index_ty.sty) {
2365 (Some(ty), &ty::TyUint(ast::UintTy::Us)) | (Some(ty), &ty::TyInfer(ty::IntVar(_))) => {
2366 debug!("try_index_step: success, using built-in indexing");
2367 // If we had `[T; N]`, we should've caught it before unsizing to `[T]`.
2369 self.write_autoderef_adjustment(base_expr.id, autoderefs);
2370 return Some((tcx.types.usize, ty));
2375 // Try `IndexMut` first, if preferred.
2376 let method = match (lvalue_pref, tcx.lang_items.index_mut_trait()) {
2377 (PreferMutLvalue, Some(trait_did)) => {
2378 self.lookup_method_in_trait_adjusted(expr.span,
2380 token::intern("index_mut"),
2385 Some(vec![input_ty]))
2390 // Otherwise, fall back to `Index`.
2391 let method = match (method, tcx.lang_items.index_trait()) {
2392 (None, Some(trait_did)) => {
2393 self.lookup_method_in_trait_adjusted(expr.span,
2395 token::intern("index"),
2400 Some(vec![input_ty]))
2402 (method, _) => method,
2405 // If some lookup succeeds, write callee into table and extract index/element
2406 // type from the method signature.
2407 // If some lookup succeeded, install method in table
2408 method.map(|method| {
2409 debug!("try_index_step: success, using overloaded indexing");
2410 self.tables.borrow_mut().method_map.insert(method_call, method);
2411 (input_ty, self.make_overloaded_lvalue_return_type(method).ty)
// Check the (non-receiver) arguments of a method call against the method's
// signature. If the callee type is already an error, fabricates error
// argument types so checking can continue without cascading diagnostics.
2415 fn check_method_argument_types(&self,
2417 method_fn_ty: Ty<'tcx>,
2418 callee_expr: &'gcx hir::Expr,
2419 args_no_rcvr: &'gcx [P<hir::Expr>],
2420 tuple_arguments: TupleArgumentsFlag,
2421 expected: Expectation<'tcx>)
2423 if method_fn_ty.references_error() {
2424 let err_inputs = self.err_args(args_no_rcvr.len());
2426 let err_inputs = match tuple_arguments {
2427 DontTupleArguments => err_inputs,
2428 TupleArguments => vec![self.tcx.mk_tup(err_inputs)],
2431 self.check_argument_types(sp, &err_inputs[..], &[], args_no_rcvr,
2432 false, tuple_arguments);
2435 match method_fn_ty.sty {
2436 ty::TyFnDef(_, _, ref fty) => {
2437 // HACK(eddyb) ignore self in the definition (see above).
// `inputs[0]` is the receiver; slicing it off matches `args_no_rcvr`.
2438 let expected_arg_tys = self.expected_types_for_fn_args(sp, expected,
2440 &fty.sig.0.inputs[1..]);
2441 self.check_argument_types(sp, &fty.sig.0.inputs[1..], &expected_arg_tys[..],
2442 args_no_rcvr, fty.sig.0.variadic, tuple_arguments);
2446 span_bug!(callee_expr.span, "method without bare fn type");
2452 /// Generic function that factors out common logic from function calls,
2453 /// method calls and overloaded operators.
// Validates argument count (E0057/E0060/E0061, plus E0059 for non-tuple
// `Fn`-trait calls), then type-checks each argument against the formal and
// expected types. Also handles C-variadic calls and unreachable-code lints
// for diverging arguments.
2454 fn check_argument_types(&self,
2456 fn_inputs: &[Ty<'tcx>],
2457 expected_arg_tys: &[Ty<'tcx>],
2458 args: &'gcx [P<hir::Expr>],
2460 tuple_arguments: TupleArgumentsFlag) {
2463 // Grab the argument types, supplying fresh type variables
2464 // if the wrong number of arguments were supplied
2465 let supplied_arg_count = if tuple_arguments == DontTupleArguments {
2471 // All the input types from the fn signature must outlive the call
2472 // so as to validate implied bounds.
2473 for &fn_input_ty in fn_inputs {
2474 self.register_wf_obligation(fn_input_ty, sp, traits::MiscObligation);
2477 let mut expected_arg_tys = expected_arg_tys;
2478 let expected_arg_count = fn_inputs.len();
// Local helper that emits the "this function takes N parameters but M
// parameters were supplied" diagnostic, with a note listing the expected
// parameter types when there are any.
2480 fn parameter_count_error<'tcx>(sess: &Session, sp: Span, fn_inputs: &[Ty<'tcx>],
2481 expected_count: usize, arg_count: usize, error_code: &str,
2483 let mut err = sess.struct_span_err_with_code(sp,
2484 &format!("this function takes {}{} parameter{} but {} parameter{} supplied",
2485 if variadic {"at least "} else {""},
2487 if expected_count == 1 {""} else {"s"},
2489 if arg_count == 1 {" was"} else {"s were"}),
2492 err.span_label(sp, &format!("expected {}{} parameter{}",
2493 if variadic {"at least "} else {""},
2495 if expected_count == 1 {""} else {"s"}));
2497 let input_types = fn_inputs.iter().map(|i| format!("{:?}", i)).collect::<Vec<String>>();
2498 if input_types.len() > 0 {
2499 err.note(&format!("the following parameter type{} expected: {}",
2500 if expected_count == 1 {" was"} else {"s were"},
2501 input_types.join(", ")));
// For `Fn`-trait sugar calls the single formal input must be a tuple whose
// arity matches the supplied arguments.
2506 let formal_tys = if tuple_arguments == TupleArguments {
2507 let tuple_type = self.structurally_resolved_type(sp, fn_inputs[0]);
2508 match tuple_type.sty {
2509 ty::TyTuple(arg_types) if arg_types.len() != args.len() => {
2510 parameter_count_error(tcx.sess, sp, fn_inputs, arg_types.len(), args.len(),
2512 expected_arg_tys = &[];
2513 self.err_args(args.len())
2515 ty::TyTuple(arg_types) => {
2516 expected_arg_tys = match expected_arg_tys.get(0) {
2517 Some(&ty) => match ty.sty {
2518 ty::TyTuple(ref tys) => &tys,
2526 span_err!(tcx.sess, sp, E0059,
2527 "cannot use call notation; the first type parameter \
2528 for the function trait is neither a tuple nor unit");
2529 expected_arg_tys = &[];
2530 self.err_args(args.len())
2533 } else if expected_arg_count == supplied_arg_count {
2535 } else if variadic {
// Variadic functions accept any count >= the declared parameters.
2536 if supplied_arg_count >= expected_arg_count {
2539 parameter_count_error(tcx.sess, sp, fn_inputs, expected_arg_count,
2540 supplied_arg_count, "E0060", true);
2541 expected_arg_tys = &[];
2542 self.err_args(supplied_arg_count)
2545 parameter_count_error(tcx.sess, sp, fn_inputs, expected_arg_count, supplied_arg_count,
2547 expected_arg_tys = &[];
2548 self.err_args(supplied_arg_count)
2551 debug!("check_argument_types: formal_tys={:?}",
2552 formal_tys.iter().map(|t| self.ty_to_string(*t)).collect::<Vec<String>>());
2554 // Check the arguments.
2555 // We do this in a pretty awful way: first we typecheck any arguments
2556 // that are not anonymous functions, then we typecheck the anonymous
2557 // functions. This is so that we have more information about the types
2558 // of arguments when we typecheck the functions. This isn't really the
2559 // right way to do this.
// Two passes: `false` = non-closure args first, `true` = closures second.
2560 let xs = [false, true];
2561 let mut any_diverges = false; // has any of the arguments diverged?
2562 let mut warned = false; // have we already warned about unreachable code?
2563 for check_blocks in &xs {
2564 let check_blocks = *check_blocks;
2565 debug!("check_blocks={}", check_blocks);
2567 // More awful hacks: before we check argument types, try to do
2568 // an "opportunistic" vtable resolution of any trait bounds on
2569 // the call. This helps coercions.
2571 self.select_obligations_where_possible();
2574 // For variadic functions, we don't have a declared type for all of
2575 // the arguments hence we only do our usual type checking with
2576 // the arguments who's types we do know.
2577 let t = if variadic {
2579 } else if tuple_arguments == TupleArguments {
2584 for (i, arg) in args.iter().take(t).enumerate() {
// Warn once about code following a diverging argument.
2585 if any_diverges && !warned {
2588 .add_lint(lint::builtin::UNREACHABLE_CODE,
2591 "unreachable expression".to_string());
2594 let is_block = match arg.node {
2595 hir::ExprClosure(..) => true,
2599 if is_block == check_blocks {
2600 debug!("checking the argument");
2601 let formal_ty = formal_tys[i];
2603 // The special-cased logic below has three functions:
2604 // 1. Provide as good of an expected type as possible.
2605 let expected = expected_arg_tys.get(i).map(|&ty| {
2606 Expectation::rvalue_hint(self, ty)
2609 self.check_expr_with_expectation(&arg,
2610 expected.unwrap_or(ExpectHasType(formal_ty)));
2611 // 2. Coerce to the most detailed type that could be coerced
2612 // to, which is `expected_ty` if `rvalue_hint` returns an
2613 // `ExpectHasType(expected_ty)`, or the `formal_ty` otherwise.
2614 let coerce_ty = expected.and_then(|e| e.only_has_type(self));
2615 self.demand_coerce(&arg, coerce_ty.unwrap_or(formal_ty));
2617 // 3. Relate the expected type and the formal one,
2618 // if the expected type was used for the coercion.
2619 coerce_ty.map(|ty| self.demand_suptype(arg.span, formal_ty, ty));
2622 if let Some(&arg_ty) = self.tables.borrow().node_types.get(&arg.id) {
2623 // FIXME(canndrew): This is_never should probably be an is_uninhabited
2624 any_diverges = any_diverges ||
2625 self.type_var_diverges(arg_ty) ||
2629 if any_diverges && !warned {
2630 let parent = self.tcx.map.get_parent_node(args[0].id);
2633 .add_lint(lint::builtin::UNREACHABLE_CODE,
2636 "unreachable call".to_string());
2642 // We also need to make sure we at least write the ty of the other
2643 // arguments which we skipped above.
// Extra variadic arguments: type-check them and reject types that C
// would auto-promote, demanding an explicit cast instead.
2645 for arg in args.iter().skip(expected_arg_count) {
2646 self.check_expr(&arg);
2648 // There are a few types which get autopromoted when passed via varargs
2649 // in C but we just error out instead and require explicit casts.
2650 let arg_ty = self.structurally_resolved_type(arg.span,
2651 self.expr_ty(&arg));
2653 ty::TyFloat(ast::FloatTy::F32) => {
2654 self.type_error_message(arg.span, |t| {
2655 format!("can't pass an `{}` to variadic \
2656 function, cast to `c_double`", t)
2659 ty::TyInt(ast::IntTy::I8) | ty::TyInt(ast::IntTy::I16) | ty::TyBool => {
2660 self.type_error_message(arg.span, |t| {
2661 format!("can't pass `{}` to variadic \
2662 function, cast to `c_int`",
2666 ty::TyUint(ast::UintTy::U8) | ty::TyUint(ast::UintTy::U16) => {
2667 self.type_error_message(arg.span, |t| {
2668 format!("can't pass `{}` to variadic \
2669 function, cast to `c_uint`",
2673 ty::TyFnDef(_, _, f) => {
// Bare fn items must be cast to fn pointers before a variadic call.
2674 let ptr_ty = self.tcx.mk_fn_ptr(f);
2675 let ptr_ty = self.resolve_type_vars_if_possible(&ptr_ty);
2676 self.type_error_message(arg.span,
2678 format!("can't pass `{}` to variadic \
2679 function, cast to `{}`", t, ptr_ty)
// Produce `len` copies of the error type, used as placeholder argument
// types when the real signature is unknown or malformed.
2688 fn err_args(&self, len: usize) -> Vec<Ty<'tcx>> {
2689 (0..len).map(|_| self.tcx.types.err).collect()
// Record `output` as the type of the call expression itself.
2692 fn write_call(&self,
2693 call_expr: &hir::Expr,
2695 self.write_ty(call_expr.id, output);
2698 // AST fragment checking
// Determines the type of a literal. Unsuffixed integer and float literals
// consult the expected type first and otherwise fall back to a fresh
// int/float inference variable, which later defaults to i32/f64.
// NOTE(review): the `fn check_lit` header lines are elided in this excerpt.
2701 expected: Expectation<'tcx>)
2707 ast::LitKind::Str(..) => tcx.mk_static_str(),
2708 ast::LitKind::ByteStr(ref v) => {
// Byte strings are `&'static [u8; N]`.
2709 tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic),
2710 tcx.mk_array(tcx.types.u8, v.len()))
2712 ast::LitKind::Byte(_) => tcx.types.u8,
2713 ast::LitKind::Char(_) => tcx.types.char,
2714 ast::LitKind::Int(_, ast::LitIntType::Signed(t)) => tcx.mk_mach_int(t),
2715 ast::LitKind::Int(_, ast::LitIntType::Unsigned(t)) => tcx.mk_mach_uint(t),
2716 ast::LitKind::Int(_, ast::LitIntType::Unsuffixed) => {
2717 let opt_ty = expected.to_option(self).and_then(|ty| {
2719 ty::TyInt(_) | ty::TyUint(_) => Some(ty),
2720 ty::TyChar => Some(tcx.types.u8),
2721 ty::TyRawPtr(..) => Some(tcx.types.usize),
2722 ty::TyFnDef(..) | ty::TyFnPtr(_) => Some(tcx.types.usize),
2726 opt_ty.unwrap_or_else(
2727 || tcx.mk_int_var(self.next_int_var_id()))
2729 ast::LitKind::Float(_, t) => tcx.mk_mach_float(t),
2730 ast::LitKind::FloatUnsuffixed(_) => {
2731 let opt_ty = expected.to_option(self).and_then(|ty| {
2733 ty::TyFloat(_) => Some(ty),
2737 opt_ty.unwrap_or_else(
2738 || tcx.mk_float_var(self.next_float_var_id()))
2740 ast::LitKind::Bool(_) => tcx.types.bool
// Check `expr` with `expected` as a hint, then require its type to be
// exactly equal to `expected` (no subtyping, no coercion).
2744 fn check_expr_eq_type(&self,
2745 expr: &'gcx hir::Expr,
2746 expected: Ty<'tcx>) {
2747 self.check_expr_with_hint(expr, expected);
2748 self.demand_eqtype(expr.span, expected, self.expr_ty(expr));
// Check `expr` with `expected` as a hint, then require its type to be a
// subtype of `expected`.
2751 pub fn check_expr_has_type(&self,
2752 expr: &'gcx hir::Expr,
2753 expected: Ty<'tcx>) {
2754 self.check_expr_with_hint(expr, expected);
2755 self.demand_suptype(expr.span, expected, self.expr_ty(expr));
// Check `expr` with `expected` as a hint, then require its type to be
// coercible to `expected` (allows adjustments such as autoderef/unsizing).
2758 fn check_expr_coercable_to_type(&self,
2759 expr: &'gcx hir::Expr,
2760 expected: Ty<'tcx>) {
2761 self.check_expr_with_hint(expr, expected);
2762 self.demand_coerce(expr, expected);
// Convenience wrapper: check with an `ExpectHasType` expectation.
2765 fn check_expr_with_hint(&self, expr: &'gcx hir::Expr,
2766 expected: Ty<'tcx>) {
2767 self.check_expr_with_expectation(expr, ExpectHasType(expected))
// Convenience wrapper: check with an expectation but no lvalue preference.
2770 fn check_expr_with_expectation(&self,
2771 expr: &'gcx hir::Expr,
2772 expected: Expectation<'tcx>) {
2773 self.check_expr_with_expectation_and_lvalue_pref(expr, expected, NoPreference)
// Convenience wrapper: check with no expectation at all.
2776 fn check_expr(&self, expr: &'gcx hir::Expr) {
2777 self.check_expr_with_expectation(expr, NoExpectation)
// Convenience wrapper: check with an lvalue preference but no expectation.
2780 fn check_expr_with_lvalue_pref(&self, expr: &'gcx hir::Expr,
2781 lvalue_pref: LvaluePreference) {
2782 self.check_expr_with_expectation_and_lvalue_pref(expr, NoExpectation, lvalue_pref)
2785 // determine the `self` type, using fresh variables for all variables
2786 // declared on the impl declaration e.g., `impl<A,B> for Vec<(A,B)>`
2787 // would return ($0, $1) where $0 and $1 are freshly instantiated type
// Returns both the instantiated self type and the fresh substitutions.
2789 pub fn impl_self_ty(&self,
2790 span: Span, // (potential) receiver for this impl
2792 -> TypeAndSubsts<'tcx> {
2793 let ity = self.tcx.lookup_item_type(did);
2794 debug!("impl_self_ty: ity={:?}", ity);
2796 let substs = self.fresh_substs_for_item(span, did);
2797 let substd_ty = self.instantiate_type_scheme(span, &substs, &ity.ty);
2799 TypeAndSubsts { substs: substs, ty: substd_ty }
2802 /// Unifies the return type with the expected type early, for more coercions
2803 /// and forward type information on the argument expressions.
// Returns the formal argument types with the substitutions resulting from
// that early unification applied; returns an empty vec when unification
// fails or no return-type expectation exists.
2804 fn expected_types_for_fn_args(&self,
2806 expected_ret: Expectation<'tcx>,
2807 formal_ret: Ty<'tcx>,
2808 formal_args: &[Ty<'tcx>])
2810 let expected_args = expected_ret.only_has_type(self).and_then(|ret_ty| {
// Region constraints are committed only if the subtyping succeeds.
2811 self.commit_regions_if_ok(|| {
2812 // Attempt to apply a subtyping relationship between the formal
2813 // return type (likely containing type variables if the function
2814 // is polymorphic) and the expected return type.
2815 // No argument expectations are produced if unification fails.
2816 let origin = TypeOrigin::Misc(call_span);
2817 let ures = self.sub_types(false, origin, formal_ret, ret_ty);
2818 // FIXME(#15760) can't use try! here, FromError doesn't default
2819 // to identity so the resulting type is not constrained.
2821 // FIXME(#32730) propagate obligations
2822 Ok(InferOk { obligations, .. }) => assert!(obligations.is_empty()),
2823 Err(e) => return Err(e),
2826 // Record all the argument types, with the substitutions
2827 // produced from the above subtyping unification.
2828 Ok(formal_args.iter().map(|ty| {
2829 self.resolve_type_vars_if_possible(ty)
2832 }).unwrap_or(vec![]);
2833 debug!("expected_types_for_fn_args(formal={:?} -> {:?}, expected={:?} -> {:?})",
2834 formal_args, formal_ret,
2835 expected_args, expected_ret);
2839 // Checks a method call.
// Type-checks the receiver, performs method lookup, records the resolved
// method in the method map, and finally checks the remaining arguments
// against the method signature. On lookup failure, reports a method error
// (unless the name is the error-recovery `Invalid` name) and records an
// error type for the expression.
2840 fn check_method_call(&self,
2841 expr: &'gcx hir::Expr,
2842 method_name: Spanned<ast::Name>,
2843 args: &'gcx [P<hir::Expr>],
2845 expected: Expectation<'tcx>,
2846 lvalue_pref: LvaluePreference) {
// `args[0]` is the receiver; the rest are the actual call arguments.
2847 let rcvr = &args[0];
2848 self.check_expr_with_lvalue_pref(&rcvr, lvalue_pref);
2850 // no need to check for bot/err -- callee does that
2851 let expr_t = self.structurally_resolved_type(expr.span, self.expr_ty(&rcvr));
2853 let tps = tps.iter().map(|ast_ty| self.to_ty(&ast_ty)).collect::<Vec<_>>();
2854 let fn_ty = match self.lookup_method(method_name.span,
2861 let method_ty = method.ty;
2862 let method_call = MethodCall::expr(expr.id);
2863 self.tables.borrow_mut().method_map.insert(method_call, method);
2867 if method_name.node != keywords::Invalid.name() {
2868 self.report_method_error(method_name.span, expr_t,
2869 method_name.node, Some(rcvr), error);
2871 self.write_error(expr.id);
2876 // Call the generic checker.
2877 let ret_ty = self.check_method_argument_types(method_name.span, fn_ty,
2882 self.write_call(expr, ret_ty);
2885 // A generic function for checking the then and else in an if
// Checks the condition against `bool`, then unifies the types of the two
// branches: with an `else`, via a coercion LUB (or plain LUB when the then
// block has no tail expression); without an `else`, the then branch must
// be `()`. The resulting type is written for the whole `if` expression.
2887 fn check_then_else(&self,
2888 cond_expr: &'gcx hir::Expr,
2889 then_blk: &'gcx hir::Block,
2890 opt_else_expr: Option<&'gcx hir::Expr>,
2893 expected: Expectation<'tcx>) {
2894 self.check_expr_has_type(cond_expr, self.tcx.types.bool);
2896 let expected = expected.adjust_for_branches(self);
2897 self.check_block_with_expected(then_blk, expected);
2898 let then_ty = self.node_ty(then_blk.id);
2900 let unit = self.tcx.mk_nil();
2901 let (origin, expected, found, result) =
2902 if let Some(else_expr) = opt_else_expr {
2903 self.check_expr_with_expectation(else_expr, expected);
2904 let else_ty = self.expr_ty(else_expr);
2905 let origin = TypeOrigin::IfExpression(sp);
2907 // Only try to coerce-unify if we have a then expression
2908 // to assign coercions to, otherwise it's () or diverging.
2909 let result = if let Some(ref then) = then_blk.expr {
2910 let res = self.try_find_coercion_lub(origin, || Some(&**then),
2911 then_ty, else_expr);
2913 // In case we did perform an adjustment, we have to update
2914 // the type of the block, because old trans still uses it.
2915 let adj = self.tables.borrow().adjustments.get(&then.id).cloned();
2916 if res.is_ok() && adj.is_some() {
2917 self.write_ty(then_blk.id, self.adjust_expr_ty(then, adj.as_ref()));
2922 self.commit_if_ok(|_| {
2923 let trace = TypeTrace::types(origin, true, then_ty, else_ty);
2924 self.lub(true, trace, &then_ty, &else_ty)
2925 .map(|InferOk { value, obligations }| {
2926 // FIXME(#32730) propagate obligations
2927 assert!(obligations.is_empty());
2932 (origin, then_ty, else_ty, result)
2934 let origin = TypeOrigin::IfExpressionWithNoElse(sp);
// No else branch: the then branch must unify with `()`.
2935 (origin, unit, then_ty,
2936 self.eq_types(true, origin, unit, then_ty)
2937 .map(|InferOk { obligations, .. }| {
2938 // FIXME(#32730) propagate obligations
2939 assert!(obligations.is_empty());
2944 let if_ty = match result {
2946 if self.expr_ty(cond_expr).references_error() {
2953 self.report_mismatched_types(origin, expected, found, e);
2958 self.write_ty(id, if_ty);
2961 // Check field access expressions
// Autoderefs the base looking for a struct with the named field. Tracks a
// private field seen along the way so the "field is private" diagnostic
// can still assign the correct type; otherwise reports unknown-field
// errors with method/deref/similar-name suggestions.
2962 fn check_field(&self,
2963 expr: &'gcx hir::Expr,
2964 lvalue_pref: LvaluePreference,
2965 base: &'gcx hir::Expr,
2966 field: &Spanned<ast::Name>) {
2967 self.check_expr_with_lvalue_pref(base, lvalue_pref);
2968 let expr_t = self.structurally_resolved_type(expr.span,
2969 self.expr_ty(base));
2970 let mut private_candidate = None;
2971 let mut autoderef = self.autoderef(expr.span, expr_t);
2972 while let Some((base_t, autoderefs)) = autoderef.next() {
2973 if let ty::TyStruct(base_def, substs) = base_t.sty {
2974 debug!("struct named {:?}", base_t);
2975 if let Some(field) = base_def.struct_variant().find_field_named(field.node) {
2976 let field_ty = self.field_ty(expr.span, field, substs);
2977 if field.vis.is_accessible_from(self.body_id, &self.tcx().map) {
2978 autoderef.finalize(lvalue_pref, Some(base));
2979 self.write_ty(expr.id, field_ty);
2980 self.write_autoderef_adjustment(base.id, autoderefs);
// Remember an inaccessible match for the privacy diagnostic below.
2983 private_candidate = Some((base_def.did, field_ty));
2987 autoderef.unambiguous_final_ty();
2989 if let Some((did, field_ty)) = private_candidate {
2990 let struct_path = self.tcx().item_path_str(did);
2991 self.write_ty(expr.id, field_ty);
2992 let msg = format!("field `{}` of struct `{}` is private", field.node, struct_path);
2993 let mut err = self.tcx().sess.struct_span_err(expr.span, &msg);
2994 // Also check if an accessible method exists, which is often what is meant.
2995 if self.method_exists(field.span, field.node, expr_t, expr.id, false) {
2996 err.note(&format!("a method `{}` also exists, perhaps you wish to call it",
3000 } else if field.node == keywords::Invalid.name() {
3001 self.write_error(expr.id);
3002 } else if self.method_exists(field.span, field.node, expr_t, expr.id, true) {
// `foo.bar` where `bar` is a method: suggest calling it.
3003 self.type_error_struct(field.span, |actual| {
3004 format!("attempted to take value of method `{}` on type \
3005 `{}`", field.node, actual)
3007 .help("maybe a `()` to call it is missing? \
3008 If not, try an anonymous function")
3010 self.write_error(expr.id);
3012 let mut err = self.type_error_struct(expr.span, |actual| {
3013 format!("attempted access of field `{}` on type `{}`, \
3014 but no field with that name was found",
3017 if let ty::TyRawPtr(..) = expr_t.sty {
3018 err.note(&format!("`{0}` is a native pointer; perhaps you need to deref with \
3019 `(*{0}).{1}`", pprust::expr_to_string(base), field.node));
3021 if let ty::TyStruct(def, _) = expr_t.sty {
3022 Self::suggest_field_names(&mut err, def.struct_variant(), field, vec![]);
3025 self.write_error(expr.id);
3029 // displays hints about the closest matches in field names
// `skip` lists field names to exclude (e.g. fields already written in a
// struct literal); private fields from other crates are also excluded.
3030 fn suggest_field_names(err: &mut DiagnosticBuilder,
3031 variant: ty::VariantDef<'tcx>,
3032 field: &Spanned<ast::Name>,
3033 skip : Vec<InternedString>) {
3034 let name = field.node.as_str();
3035 let names = variant.fields.iter().filter_map(|field| {
3036 // ignore already set fields and private fields from non-local crates
3037 if skip.iter().any(|x| *x == field.name.as_str()) ||
3038 (variant.did.krate != LOCAL_CRATE && field.vis != Visibility::Public) {
3045 // only find fits with at least one matching letter
3046 if let Some(name) = find_best_match_for_name(names, &name, Some(name.len())) {
3047 err.span_help(field.span,
3048 &format!("did you mean `{}`?", name));
3052 // Check tuple index expressions
// Handles `expr.N` on both tuples and tuple structs, autoderefing the base.
// Tracks privacy of tuple-struct fields the same way `check_field` does,
// and distinguishes out-of-bounds indices from non-tuple types in errors.
3053 fn check_tup_field(&self,
3054 expr: &'gcx hir::Expr,
3055 lvalue_pref: LvaluePreference,
3056 base: &'gcx hir::Expr,
3057 idx: codemap::Spanned<usize>) {
3058 self.check_expr_with_lvalue_pref(base, lvalue_pref);
3059 let expr_t = self.structurally_resolved_type(expr.span,
3060 self.expr_ty(base));
3061 let mut private_candidate = None;
3062 let mut tuple_like = false;
3063 let mut autoderef = self.autoderef(expr.span, expr_t);
3064 while let Some((base_t, autoderefs)) = autoderef.next() {
3065 let field = match base_t.sty {
3066 ty::TyStruct(base_def, substs) => {
// Only tuple-like structs (e.g. `struct S(u32);`) support `.0` access.
3067 tuple_like = base_def.struct_variant().kind == ty::VariantKind::Tuple;
3068 if !tuple_like { continue }
3070 debug!("tuple struct named {:?}", base_t);
3071 base_def.struct_variant().fields.get(idx.node).and_then(|field| {
3072 let field_ty = self.field_ty(expr.span, field, substs);
3073 private_candidate = Some((base_def.did, field_ty));
3074 if field.vis.is_accessible_from(self.body_id, &self.tcx().map) {
3081 ty::TyTuple(ref v) => {
3083 v.get(idx.node).cloned()
3088 if let Some(field_ty) = field {
3089 autoderef.finalize(lvalue_pref, Some(base));
3090 self.write_ty(expr.id, field_ty);
3091 self.write_autoderef_adjustment(base.id, autoderefs);
3095 autoderef.unambiguous_final_ty();
3097 if let Some((did, field_ty)) = private_candidate {
3098 let struct_path = self.tcx().item_path_str(did);
3099 let msg = format!("field `{}` of struct `{}` is private", idx.node, struct_path);
3100 self.tcx().sess.span_err(expr.span, &msg);
3101 self.write_ty(expr.id, field_ty);
3105 self.type_error_message(
3109 format!("attempted out-of-bounds tuple index `{}` on \
3114 format!("attempted tuple index `{}` on type `{}`, but the \
3115 type was not a tuple or tuple struct",
3122 self.write_error(expr.id);
// Report an unknown field in a struct (E0560) or struct-variant (E0559)
// literal, suggesting the closest existing field name while skipping
// fields the user already wrote.
3125 fn report_unknown_field(&self,
3127 variant: ty::VariantDef<'tcx>,
3129 skip_fields: &[hir::Field]) {
3130 let mut err = self.type_error_struct_with_diag(
3132 |actual| if let ty::TyEnum(..) = ty.sty {
3133 struct_span_err!(self.tcx.sess, field.name.span, E0559,
3134 "struct variant `{}::{}` has no field named `{}`",
3135 actual, variant.name.as_str(), field.name.node)
3137 struct_span_err!(self.tcx.sess, field.name.span, E0560,
3138 "structure `{}` has no field named `{}`",
3139 actual, field.name.node)
3142 // prevent all specified fields from being suggested
3143 let skip_fields = skip_fields.iter().map(|ref x| x.name.node.as_str());
3144 Self::suggest_field_names(&mut err, variant, &field.name, skip_fields.collect());
// Type-checks the field initializers of a struct/enum-variant literal:
// flags duplicate fields (with labels on both uses), unknown fields, and —
// when `check_completeness` is set (no functional-update base) — missing
// fields (E0063). Every field expression gets a type even on error.
3148 fn check_expr_struct_fields(&self,
3151 variant: ty::VariantDef<'tcx>,
3152 ast_fields: &'gcx [hir::Field],
3153 check_completeness: bool) {
3155 let substs = match adt_ty.sty {
3156 ty::TyStruct(_, substs) | ty::TyEnum(_, substs) => substs,
3157 _ => span_bug!(span, "non-ADT passed to check_expr_struct_fields")
// Fields still in this map after the loop were never initialized.
3160 let mut remaining_fields = FnvHashMap();
3161 for field in &variant.fields {
3162 remaining_fields.insert(field.name, field);
3165 let mut seen_fields = FnvHashMap();
3167 let mut error_happened = false;
3169 // Typecheck each field.
3170 for field in ast_fields {
3171 let expected_field_type;
3173 if let Some(v_field) = remaining_fields.remove(&field.name.node) {
3174 expected_field_type = self.field_ty(field.span, v_field, substs);
// Remember the span so a later duplicate can point at the first use.
3176 seen_fields.insert(field.name.node, field.span);
3178 error_happened = true;
3179 expected_field_type = tcx.types.err;
3180 if let Some(_) = variant.find_field_named(field.name.node) {
// Field exists but was already consumed: duplicate initializer.
3181 let mut err = struct_span_err!(self.tcx.sess,
3184 "field `{}` specified more than once",
3187 err.span_label(field.name.span, &format!("used more than once"));
3189 if let Some(prev_span) = seen_fields.get(&field.name.node) {
3190 err.span_label(*prev_span, &format!("first use of `{}`", field.name.node));
3195 self.report_unknown_field(adt_ty, variant, field, ast_fields);
3199 // Make sure to give a type to the field even if there's
3200 // an error, so we can continue typechecking
3201 self.check_expr_coercable_to_type(&field.expr, expected_field_type);
3204 // Make sure the programmer specified all the fields.
3205 if check_completeness &&
3207 !remaining_fields.is_empty()
3209 span_err!(tcx.sess, span, E0063,
3210 "missing field{} {} in initializer of `{}`",
3211 if remaining_fields.len() == 1 {""} else {"s"},
3212 remaining_fields.keys()
3213 .map(|n| format!("`{}`", n))
3214 .collect::<Vec<_>>()
// Error-recovery path for a struct literal whose path failed to resolve:
// record the error type for the expression but still check every field
// expression (and the optional base) so later passes find types for them.
3221 fn check_struct_fields_on_error(&self,
3223 fields: &'gcx [hir::Field],
3224 base_expr: &'gcx Option<P<hir::Expr>>) {
3225 // Make sure to still write the types
3226 // otherwise we might ICE
3227 self.write_error(id);
3228 for field in fields {
3229 self.check_expr(&field.expr);
3232 Some(ref base) => self.check_expr(&base),
// Resolve the path of a struct literal/pattern to a variant definition and
// its instantiated type. Accepts structs, enum variants, and type aliases
// that resolve to structs; reports E0071 otherwise, and feature-gates
// tuple structs/variants in struct patterns (`relaxed_adts`).
3237 pub fn check_struct_path(&self,
3239 node_id: ast::NodeId,
3241 -> Option<(ty::VariantDef<'tcx>, Ty<'tcx>)> {
3242 let def = self.finish_resolving_struct_path(path, node_id, span);
3243 let variant = match def {
3245 self.set_tainted_by_errors();
3248 Def::Variant(type_did, _) | Def::Struct(type_did) => {
3249 Some((type_did, self.tcx.expect_variant_def(def)))
3251 Def::TyAlias(did) => {
// Follow a type alias only when it points at a struct type.
3252 if let Some(&ty::TyStruct(adt, _)) = self.tcx.opt_lookup_item_type(did)
3253 .map(|scheme| &scheme.ty.sty) {
3254 Some((did, adt.struct_variant()))
3262 if let Some((def_id, variant)) = variant {
3263 if variant.kind == ty::VariantKind::Tuple &&
3264 !self.tcx.sess.features.borrow().relaxed_adts {
3265 emit_feature_err(&self.tcx.sess.parse_sess.span_diagnostic,
3266 "relaxed_adts", span, GateIssue::Language,
3267 "tuple structs and variants in struct patterns are unstable");
3269 let ty = self.instantiate_type_path(def_id, path, node_id);
3272 struct_span_err!(self.tcx.sess, path.span, E0071,
3273 "`{}` does not name a struct or a struct variant",
3274 pprust::path_to_string(path))
3275 .span_label(path.span, &format!("not a struct"))
// Typechecks a struct literal expression `Path { field: .., ..base }`:
// resolves the variant via `check_struct_path`, checks each field against
// the variant (completeness required only when there is no `..base`), and
// for a `..base` expression records the functional-record-update field
// types (E0436 when the base is not a struct).
// NOTE(review): some original source lines are missing from this listing
// (e.g. the error-recovery branch wiring and closing braces).
3281 fn check_expr_struct(&self,
3284 fields: &'gcx [hir::Field],
3285 base_expr: &'gcx Option<P<hir::Expr>>)
3287 // Find the relevant variant
3288 let (variant, expr_ty) = if let Some(variant_ty) = self.check_struct_path(path, expr.id,
// Path resolution failed: still type the fields/base to avoid ICEs.
3292 self.check_struct_fields_on_error(expr.id, fields, base_expr);
// Completeness (all fields present) is only required without `..base`.
3296 self.check_expr_struct_fields(expr_ty, path.span, variant, fields,
3297 base_expr.is_none());
3298 if let &Some(ref base_expr) = base_expr {
// The `..base` expression must have the same type as the literal.
3299 self.check_expr_has_type(base_expr, expr_ty);
3301 ty::TyStruct(adt, substs) => {
// Record per-field types for the FRU base so borrowck/trans can use them.
3302 self.tables.borrow_mut().fru_field_types.insert(
3304 adt.struct_variant().fields.iter().map(|f| {
3305 self.normalize_associated_types_in(
3306 expr.span, &f.ty(self.tcx, substs)
// `..base` on a non-struct type is E0436.
3312 span_err!(self.tcx.sess, base_expr.span, E0436,
3313 "functional record update syntax requires a struct");
3321 /// If an expression has any sub-expressions that result in a type error,
3322 /// inspecting that expression's type with `ty.references_error()` will return
3323 /// true. Likewise, if an expression is known to diverge, inspecting its
3324 /// type with `ty::type_is_bot` will return true (n.b.: since Rust is
3325 /// strict, _|_ can appear in the type of an expression that does not,
3326 /// itself, diverge: for example, fn() -> _|_.)
3327 /// Note that inspecting a type's structure *directly* may expose the fact
3328 /// that there are actually multiple representations for `TyError`, so avoid
3329 /// that when err needs to be handled differently.
// The main expression-typechecking dispatch: one match arm per `hir::Expr`
// kind. Each arm computes a type and records it via `write_ty`/`write_error`/
// `write_never` for this expression's node id. `expected` is a hint (not a
// requirement) used to drive inference; `lvalue_pref` propagates mutability
// preference into lvalue sub-expressions.
// NOTE(review): many original source lines are missing from this listing
// (match headers, `else` branches, closing braces) — comments below describe
// only what the visible lines establish.
3330 fn check_expr_with_expectation_and_lvalue_pref(&self,
3331 expr: &'gcx hir::Expr,
3332 expected: Expectation<'tcx>,
3333 lvalue_pref: LvaluePreference) {
3334 debug!(">> typechecking: expr={:?} expected={:?}",
// `box <subexpr>`: peel a `TyBox` expectation through to the sub-expression.
3340 hir::ExprBox(ref subexpr) => {
3341 let expected_inner = expected.to_option(self).map_or(NoExpectation, |ty| {
3343 ty::TyBox(ty) => Expectation::rvalue_hint(self, ty),
3347 self.check_expr_with_expectation(subexpr, expected_inner);
3348 let referent_ty = self.expr_ty(&subexpr);
3349 self.write_ty(id, tcx.mk_box(referent_ty));
3352 hir::ExprLit(ref lit) => {
3353 let typ = self.check_lit(&lit, expected);
3354 self.write_ty(id, typ);
3356 hir::ExprBinary(op, ref lhs, ref rhs) => {
3357 self.check_binop(expr, op, lhs, rhs);
3359 hir::ExprAssignOp(op, ref lhs, ref rhs) => {
3360 self.check_binop_assign(expr, op, lhs, rhs);
// Unary operators: `*`, `!`, `-`. Builtin cases are handled structurally;
// otherwise fall back to the `Deref`/`Not`/`Neg` lang-item traits.
3362 hir::ExprUnary(unop, ref oprnd) => {
3363 let expected_inner = match unop {
3364 hir::UnNot | hir::UnNeg => {
// Only deref propagates the caller's lvalue preference to its operand.
3371 let lvalue_pref = match unop {
3372 hir::UnDeref => lvalue_pref,
3375 self.check_expr_with_expectation_and_lvalue_pref(&oprnd,
3378 let mut oprnd_t = self.expr_ty(&oprnd);
3380 if !oprnd_t.references_error() {
3383 oprnd_t = self.structurally_resolved_type(expr.span, oprnd_t);
// Try builtin deref first, then an overloaded `Deref` impl.
3385 if let Some(mt) = oprnd_t.builtin_deref(true, NoPreference) {
3387 } else if let Some(method) = self.try_overloaded_deref(
3388 expr.span, Some(&oprnd), oprnd_t, lvalue_pref) {
3389 oprnd_t = self.make_overloaded_lvalue_return_type(method).ty;
3390 self.tables.borrow_mut().method_map.insert(MethodCall::expr(expr.id),
3393 self.type_error_message(expr.span, |actual| {
3394 format!("type `{}` cannot be \
3395 dereferenced", actual)
3397 oprnd_t = tcx.types.err;
// `!`: builtin on integrals and bool, otherwise the `Not` trait.
3401 oprnd_t = self.structurally_resolved_type(oprnd.span,
3403 if !(oprnd_t.is_integral() || oprnd_t.sty == ty::TyBool) {
3404 oprnd_t = self.check_user_unop("!", "not",
3405 tcx.lang_items.not_trait(),
3406 expr, &oprnd, oprnd_t, unop);
// `-`: builtin on integrals and floats, otherwise the `Neg` trait.
3410 oprnd_t = self.structurally_resolved_type(oprnd.span,
3412 if !(oprnd_t.is_integral() || oprnd_t.is_fp()) {
3413 oprnd_t = self.check_user_unop("-", "neg",
3414 tcx.lang_items.neg_trait(),
3415 expr, &oprnd, oprnd_t, unop);
3420 self.write_ty(id, oprnd_t);
// `&expr` / `&mut expr`: peel a reference/raw-pointer expectation inward.
3422 hir::ExprAddrOf(mutbl, ref oprnd) => {
3423 let hint = expected.only_has_type(self).map_or(NoExpectation, |ty| {
3425 ty::TyRef(_, ref mt) | ty::TyRawPtr(ref mt) => {
3426 if self.tcx.expr_is_lval(&oprnd) {
3427 // Lvalues may legitimately have unsized types.
3428 // For example, dereferences of a fat pointer and
3429 // the last field of a struct can be unsized.
3430 ExpectHasType(mt.ty)
3432 Expectation::rvalue_hint(self, mt.ty)
3438 let lvalue_pref = LvaluePreference::from_mutbl(mutbl);
3439 self.check_expr_with_expectation_and_lvalue_pref(&oprnd, hint, lvalue_pref);
3441 let tm = ty::TypeAndMut { ty: self.expr_ty(&oprnd), mutbl: mutbl };
3442 let oprnd_t = if tm.ty.references_error() {
3445 // Note: at this point, we cannot say what the best lifetime
3446 // is to use for resulting pointer. We want to use the
3447 // shortest lifetime possible so as to avoid spurious borrowck
3448 // errors. Moreover, the longest lifetime will depend on the
3449 // precise details of the value whose address is being taken
3450 // (and how long it is valid), which we don't know yet until type
3451 // inference is complete.
3453 // Therefore, here we simply generate a region variable. The
3454 // region inferencer will then select the ultimate value.
3455 // Finally, borrowck is charged with guaranteeing that the
3456 // value whose address was taken can actually be made to live
3457 // as long as it needs to live.
3458 let region = self.next_region_var(infer::AddrOfRegion(expr.span));
3459 tcx.mk_ref(tcx.mk_region(region), tm)
3461 self.write_ty(id, oprnd_t);
// Path expressions (possibly qualified `<T as Trait>::item`).
3463 hir::ExprPath(ref opt_qself, ref path) => {
3464 let opt_self_ty = opt_qself.as_ref().map(|qself| self.to_ty(&qself.ty));
3465 let (def, opt_ty, segments) = self.resolve_ty_and_def_ufcs(opt_self_ty, path,
3466 expr.id, expr.span);
3467 if def != Def::Err {
3468 self.instantiate_value_path(segments, opt_ty, def, expr.span, id);
3470 self.set_tainted_by_errors();
3471 self.write_error(id);
3474 // We always require that the type provided as the value for
3475 // a type parameter outlives the moment of instantiation.
3476 self.opt_node_ty_substs(expr.id, |item_substs| {
3477 self.add_wf_bounds(&item_substs.substs, expr);
3480 hir::ExprInlineAsm(_, ref outputs, ref inputs) => {
3481 for output in outputs {
3482 self.check_expr(output);
3484 for input in inputs {
3485 self.check_expr(input);
// `break`/`continue` diverge: their type is `!` (never).
3489 hir::ExprBreak(_) => { self.write_never(id); }
3490 hir::ExprAgain(_) => { self.write_never(id); }
3491 hir::ExprRet(ref expr_opt) => {
3492 if let Some(ref e) = *expr_opt {
3493 self.check_expr_coercable_to_type(&e, self.ret_ty);
// Bare `return;` — the return type must equal `()` (E0069 otherwise).
3495 let eq_result = self.eq_types(false,
3496 TypeOrigin::Misc(expr.span),
3499 // FIXME(#32730) propagate obligations
3500 .map(|InferOk { obligations, .. }| assert!(obligations.is_empty()));
3501 if eq_result.is_err() {
3502 struct_span_err!(tcx.sess, expr.span, E0069,
3503 "`return;` in a function whose return type is not `()`")
3504 .span_label(expr.span, &format!("return type is not ()"))
3508 self.write_never(id);
// Assignment: LHS checked with PreferMutLvalue; must be an lvalue (E0070);
// RHS coerced to LHS type; LHS must be `Sized`.
3510 hir::ExprAssign(ref lhs, ref rhs) => {
3511 self.check_expr_with_lvalue_pref(&lhs, PreferMutLvalue);
3514 if !tcx.expr_is_lval(&lhs) {
3516 tcx.sess, expr.span, E0070,
3517 "invalid left-hand side expression")
3520 &format!("left-hand of expression not valid"))
3524 let lhs_ty = self.expr_ty(&lhs);
3525 self.check_expr_coercable_to_type(&rhs, lhs_ty);
3526 let rhs_ty = self.expr_ty(&rhs);
3528 self.require_expr_have_sized_type(&lhs, traits::AssignmentLhsSized);
3530 if lhs_ty.references_error() || rhs_ty.references_error() {
3531 self.write_error(id);
3536 hir::ExprIf(ref cond, ref then_blk, ref opt_else_expr) => {
3537 self.check_then_else(&cond, &then_blk, opt_else_expr.as_ref().map(|e| &**e),
3538 id, expr.span, expected);
3540 hir::ExprWhile(ref cond, ref body, _) => {
3541 self.check_expr_has_type(&cond, tcx.types.bool);
3542 self.check_block_no_value(&body);
3543 let cond_ty = self.expr_ty(&cond);
3544 let body_ty = self.node_ty(body.id);
3545 if cond_ty.references_error() || body_ty.references_error() {
3546 self.write_error(id);
3552 hir::ExprLoop(ref body, _) => {
3553 self.check_block_no_value(&body);
// A `loop` with no reachable `break` never produces a value: type `!`.
3554 if !may_break(tcx, expr.id, &body) {
3555 self.write_never(id);
3560 hir::ExprMatch(ref discrim, ref arms, match_src) => {
3561 self.check_match(expr, &discrim, arms, expected, match_src);
3563 hir::ExprClosure(capture, ref decl, ref body, _) => {
3564 self.check_expr_closure(expr, capture, &decl, &body, expected);
3566 hir::ExprBlock(ref b) => {
3567 self.check_block_with_expected(&b, expected);
3568 self.write_ty(id, self.node_ty(b.id));
3570 hir::ExprCall(ref callee, ref args) => {
3571 self.check_call(expr, &callee, &args[..], expected);
3573 // we must check that return type of called functions is WF:
3574 let ret_ty = self.expr_ty(expr);
3575 self.register_wf_obligation(ret_ty, expr.span, traits::MiscObligation);
3577 hir::ExprMethodCall(name, ref tps, ref args) => {
3578 self.check_method_call(expr, name, &args[..], &tps[..], expected, lvalue_pref);
// If any argument had a type error, the whole call is an error.
3579 let arg_tys = args.iter().map(|a| self.expr_ty(&a));
3580 let args_err = arg_tys.fold(false, |rest_err, a| rest_err || a.references_error());
3582 self.write_error(id);
// Cast `e as T`: type `e` with a castable-to hint; the real cast legality
// check is deferred until inference has resolved more type variables.
3585 hir::ExprCast(ref e, ref t) => {
3586 if let hir::TyFixedLengthVec(_, ref count_expr) = t.node {
3587 self.check_expr_with_hint(&count_expr, tcx.types.usize);
3590 // Find the type of `e`. Supply hints based on the type we are casting to,
3592 let t_cast = self.to_ty(t);
3593 let t_cast = self.resolve_type_vars_if_possible(&t_cast);
3594 self.check_expr_with_expectation(e, ExpectCastableToType(t_cast));
3595 let t_expr = self.expr_ty(e);
3596 let t_cast = self.resolve_type_vars_if_possible(&t_cast);
3598 // Eagerly check for some obvious errors.
3599 if t_expr.references_error() || t_cast.references_error() {
3600 self.write_error(id);
3602 // Write a type for the whole expression, assuming everything is going
3604 self.write_ty(id, t_cast);
3606 // Defer other checks until we're done type checking.
3607 let mut deferred_cast_checks = self.deferred_cast_checks.borrow_mut();
3608 match cast::CastCheck::new(self, e, t_expr, t_cast, t.span, expr.span) {
3610 deferred_cast_checks.push(cast_check);
3612 Err(ErrorReported) => {
3613 self.write_error(id);
// Type ascription `e: T`.
3618 hir::ExprType(ref e, ref t) => {
3619 let typ = self.to_ty(&t);
3620 self.check_expr_eq_type(&e, typ);
3621 self.write_ty(id, typ);
// Array literal `[a, b, c]`: unify all elements via coercion LUB.
3623 hir::ExprVec(ref args) => {
3624 let uty = expected.to_option(self).and_then(|uty| {
3626 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3631 let mut unified = self.next_ty_var();
3632 let coerce_to = uty.unwrap_or(unified);
3634 for (i, e) in args.iter().enumerate() {
3635 self.check_expr_with_hint(e, coerce_to);
3636 let e_ty = self.expr_ty(e);
3637 let origin = TypeOrigin::Misc(e.span);
3639 // Special-case the first element, as it has no "previous expressions".
3640 let result = if i == 0 {
3641 self.try_coerce(e, coerce_to)
3643 let prev_elems = || args[..i].iter().map(|e| &**e);
3644 self.try_find_coercion_lub(origin, prev_elems, unified, e)
3648 Ok(ty) => unified = ty,
3650 self.report_mismatched_types(origin, unified, e_ty, e);
3654 self.write_ty(id, tcx.mk_array(unified, args.len()));
// Repeat expression `[element; count]`: count must be a `usize` constant.
3656 hir::ExprRepeat(ref element, ref count_expr) => {
3657 self.check_expr_has_type(&count_expr, tcx.types.usize);
3658 let count = eval_length(self.tcx.global_tcx(), &count_expr, "repeat count")
3661 let uty = match expected {
3662 ExpectHasType(uty) => {
3664 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3671 let (element_ty, t) = match uty {
3673 self.check_expr_coercable_to_type(&element, uty);
3677 let t: Ty = self.next_ty_var();
3678 self.check_expr_has_type(&element, t);
3679 (self.expr_ty(&element), t)
3684 // For [foo, ..n] where n > 1, `foo` must have
// a `Copy` bound, enforced here via the `RepeatVec` obligation.
3686 self.require_type_meets(t, expr.span, traits::RepeatVec, ty::BoundCopy);
3689 if element_ty.references_error() {
3690 self.write_error(id);
3692 let t = tcx.mk_array(t, count);
3693 self.write_ty(id, t);
// Tuple literal: check each element against the expected tuple field type
// when an expectation with matching arity is available.
3696 hir::ExprTup(ref elts) => {
3697 let flds = expected.only_has_type(self).and_then(|ty| {
3699 ty::TyTuple(ref flds) => Some(&flds[..]),
3703 let mut err_field = false;
3705 let elt_ts = elts.iter().enumerate().map(|(i, e)| {
3706 let t = match flds {
3707 Some(ref fs) if i < fs.len() => {
3709 self.check_expr_coercable_to_type(&e, ety);
3713 self.check_expr_with_expectation(&e, NoExpectation);
3717 err_field = err_field || t.references_error();
3721 self.write_error(id);
3723 let typ = tcx.mk_tup(elt_ts);
3724 self.write_ty(id, typ);
3727 hir::ExprStruct(ref path, ref fields, ref base_expr) => {
3728 self.check_expr_struct(expr, path, fields, base_expr);
3730 self.require_expr_have_sized_type(expr, traits::StructInitializerSized);
3732 hir::ExprField(ref base, ref field) => {
3733 self.check_field(expr, lvalue_pref, &base, field);
3735 hir::ExprTupField(ref base, idx) => {
3736 self.check_tup_field(expr, lvalue_pref, &base, idx);
// Indexing `base[idx]`: resolved via `lookup_indexing`; on failure, emits
// a "cannot index" diagnostic with a tuple-indexing suggestion when apt.
3738 hir::ExprIndex(ref base, ref idx) => {
3739 self.check_expr_with_lvalue_pref(&base, lvalue_pref);
3740 self.check_expr(&idx);
3742 let base_t = self.expr_ty(&base);
3743 let idx_t = self.expr_ty(&idx);
3745 if base_t.references_error() {
3746 self.write_ty(id, base_t);
3747 } else if idx_t.references_error() {
3748 self.write_ty(id, idx_t);
3750 let base_t = self.structurally_resolved_type(expr.span, base_t);
3751 match self.lookup_indexing(expr, base, base_t, idx_t, lvalue_pref) {
3752 Some((index_ty, element_ty)) => {
3753 let idx_expr_ty = self.expr_ty(idx);
3754 self.demand_eqtype(expr.span, index_ty, idx_expr_ty);
3755 self.write_ty(id, element_ty);
3758 self.check_expr_has_type(&idx, self.tcx.types.err);
3759 let mut err = self.type_error_struct(
3762 format!("cannot index a value of type `{}`",
3766 // Try to give some advice about indexing tuples.
3767 if let ty::TyTuple(_) = base_t.sty {
3768 let mut needs_note = true;
3769 // If the index is an integer, we can show the actual
3770 // fixed expression:
3771 if let hir::ExprLit(ref lit) = idx.node {
3772 if let ast::LitKind::Int(i,
3773 ast::LitIntType::Unsuffixed) = lit.node {
3774 let snip = tcx.sess.codemap().span_to_snippet(base.span);
3775 if let Ok(snip) = snip {
3776 err.span_suggestion(expr.span,
3777 "to access tuple elements, \
3778 use tuple indexing syntax \
3780 format!("{}.{}", snip, i));
3786 err.help("to access tuple elements, use tuple indexing \
3787 syntax (e.g. `tuple.0`)");
3791 self.write_ty(id, self.tcx().types.err);
3798 debug!("type of expr({}) {} is...", expr.id,
3799 pprust::expr_to_string(expr));
3800 debug!("... {:?}, expected is {:?}",
3805 // Finish resolving a path in a struct expression or pattern `S::A { .. }` if necessary.
3806 // The newly resolved definition is written into `def_map`.
// If resolve already fully determined the path (`depth == 0`), the cached
// resolution is returned; otherwise the remaining associated-path segments
// are resolved via `AstConv::finish_resolving_def_to_ty` and the result is
// cached in `def_map`.
// NOTE(review): some original source lines are missing from this listing
// (e.g. the `path` parameter, return type, and closing braces).
3807 pub fn finish_resolving_struct_path(&self,
3809 node_id: ast::NodeId,
3813 let path_res = self.tcx().expect_resolution(node_id);
3814 if path_res.depth == 0 {
3815 // If fully resolved already, we don't have to do anything.
// `depth` counts trailing unresolved segments; split the path there.
3818 let base_ty_end = path.segments.len() - path_res.depth;
3819 let (_ty, def) = AstConv::finish_resolving_def_to_ty(self, self, span,
3820 PathParamMode::Optional,
3824 &path.segments[..base_ty_end],
3825 &path.segments[base_ty_end..]);
3826 // Write back the new resolution.
3827 self.tcx().def_map.borrow_mut().insert(node_id, PathResolution::new(def));
3832 // Resolve associated value path into a base type and associated constant or method definition.
3833 // The newly resolved definition is written into `def_map`.
// Returns `(def, optional self type, segments to instantiate)`. When the
// path was already fully resolved, returns the cached resolution with all
// segments; otherwise resolves everything but the last segment as a type,
// then looks up the final segment as an associated const/method on it
// (UFCS), reporting method errors unless the name is the error placeholder.
// NOTE(review): some original source lines are missing from this listing
// (e.g. `Ok`/fallback match arms and closing braces).
3834 pub fn resolve_ty_and_def_ufcs<'b>(&self,
3835 opt_self_ty: Option<Ty<'tcx>>,
3836 path: &'b hir::Path,
3837 node_id: ast::NodeId,
3839 -> (Def, Option<Ty<'tcx>>, &'b [hir::PathSegment])
3841 let path_res = self.tcx().expect_resolution(node_id);
3842 if path_res.depth == 0 {
3843 // If fully resolved already, we don't have to do anything.
3844 (path_res.base_def, opt_self_ty, &path.segments)
3846 // Try to resolve everything except for the last segment as a type.
3847 let ty_segments = path.segments.split_last().unwrap().1;
3848 let base_ty_end = path.segments.len() - path_res.depth;
3849 let (ty, _def) = AstConv::finish_resolving_def_to_ty(self, self, span,
3850 PathParamMode::Optional,
3854 &ty_segments[..base_ty_end],
3855 &ty_segments[base_ty_end..]);
3857 // Resolve an associated constant or method on the previously resolved type.
3858 let item_segment = path.segments.last().unwrap();
3859 let item_name = item_segment.name;
3860 let def = match self.resolve_ufcs(span, item_name, ty, node_id) {
3863 let def = match error {
// A private item still resolves (for better downstream diagnostics).
3864 method::MethodError::PrivateMatch(def) => def,
// Suppress the error for the parser's invalid-name placeholder.
3867 if item_name != keywords::Invalid.name() {
3868 self.report_method_error(span, ty, item_name, None, error);
3874 // Write back the new resolution.
3875 self.tcx().def_map.borrow_mut().insert(node_id, PathResolution::new(def));
3876 (def, Some(ty), slice::ref_slice(item_segment))
// Typechecks the initializer of a `let` declaration. When the pattern
// contains a `ref`/`ref mut` binding, the initializer type must *equal*
// the declared local type (no coercion); otherwise plain coercion is used.
// NOTE(review): some original source lines are missing from this listing
// (e.g. the return type and closing braces).
3880 pub fn check_decl_initializer(&self,
3881 local: &'gcx hir::Local,
3882 init: &'gcx hir::Expr)
3884 let ref_bindings = self.tcx.pat_contains_ref_binding(&local.pat);
3886 let local_ty = self.local_ty(init.span, local.id);
3887 if let Some(m) = ref_bindings {
3888 // Somewhat subtle: if we have a `ref` binding in the pattern,
3889 // we want to avoid introducing coercions for the RHS. This is
3890 // both because it helps preserve sanity and, in the case of
3891 // ref mut, for soundness (issue #23116). In particular, in
3892 // the latter case, we need to be clear that the type of the
3893 // referent for the reference that results is *equal to* the
3894 // type of the lvalue it is referencing, and not some
3895 // supertype thereof.
3896 self.check_expr_with_lvalue_pref(init, LvaluePreference::from_mutbl(m));
3897 let init_ty = self.expr_ty(init);
3898 self.demand_eqtype(init.span, init_ty, local_ty);
3900 self.check_expr_coercable_to_type(init, local_ty)
// Typechecks a whole `let` declaration: records the declared local type,
// checks the optional initializer, then checks the pattern against the
// local type. An error in either propagates to the declaration's type.
// NOTE(review): some original source lines are missing from this listing
// (closing braces).
3904 pub fn check_decl_local(&self, local: &'gcx hir::Local) {
3905 let t = self.local_ty(local.span, local.id);
3906 self.write_ty(local.id, t);
3908 if let Some(ref init) = local.init {
3909 self.check_decl_initializer(local, &init);
3910 let init_ty = self.expr_ty(&init);
3911 if init_ty.references_error() {
3912 self.write_ty(local.id, init_ty);
3916 self.check_pat(&local.pat, t);
3917 let pat_ty = self.node_ty(local.pat.id);
3918 if pat_ty.references_error() {
3919 self.write_ty(local.id, pat_ty);
// Typechecks a single statement. Tracks whether any constituent type
// diverges (`saw_bot`) or is an error (`saw_err`) and writes the
// statement node's type accordingly: diverging var, error, or `()`.
// NOTE(review): some original source lines are missing from this listing
// (the outer match header, `node_id` binding, closing braces).
3923 pub fn check_stmt(&self, stmt: &'gcx hir::Stmt) {
3925 let mut saw_bot = false;
3926 let mut saw_err = false;
3928 hir::StmtDecl(ref decl, id) => {
3931 hir::DeclLocal(ref l) => {
3932 self.check_decl_local(&l);
3933 let l_t = self.node_ty(l.id);
3934 saw_bot = saw_bot || self.type_var_diverges(l_t);
3935 saw_err = saw_err || l_t.references_error();
3937 hir::DeclItem(_) => {/* ignore for now */ }
// Expression statement without semicolon: must have type `()`.
3940 hir::StmtExpr(ref expr, id) => {
3942 // Check with expected type of ()
3943 self.check_expr_has_type(&expr, self.tcx.mk_nil());
3944 let expr_ty = self.expr_ty(&expr);
3945 saw_bot = saw_bot || self.type_var_diverges(expr_ty);
3946 saw_err = saw_err || expr_ty.references_error();
// Semicolon statement: any type allowed; value discarded.
3948 hir::StmtSemi(ref expr, id) => {
3950 self.check_expr(&expr);
3951 let expr_ty = self.expr_ty(&expr);
3952 saw_bot |= self.type_var_diverges(expr_ty);
3953 saw_err |= expr_ty.references_error();
// Divergence takes priority over error, which takes priority over `()`.
3957 self.write_ty(node_id, self.next_diverging_ty_var());
3960 self.write_error(node_id);
3963 self.write_nil(node_id)
// Typechecks a block in statement position: expects the block to produce
// `()` and demands `() <: block type` unless the block already errored.
// NOTE(review): the closing braces are missing from this listing.
3967 pub fn check_block_no_value(&self, blk: &'gcx hir::Block) {
3968 self.check_block_with_expected(blk, ExpectHasType(self.tcx.mk_nil()));
3969 let blkty = self.node_ty(blk.id);
3970 if blkty.references_error() {
3971 self.write_error(blk.id);
3973 let nilty = self.tcx.mk_nil();
3974 self.demand_suptype(blk.span, nilty, blkty);
// Typechecks a block against an expectation: checks every statement,
// emitting the `UNREACHABLE_CODE` lint (at most once) for statements after
// a diverging one, then types the optional tail expression and writes the
// block's type (error / diverging / tail type / `()`).
// NOTE(review): some original source lines are missing from this listing
// (e.g. the `let prev = …` binding that pairs with the restore at the end,
// the tail-expression match header, and closing braces).
3978 fn check_block_with_expected(&self,
3979 blk: &'gcx hir::Block,
3980 expected: Expectation<'tcx>) {
// Push the block's unsafety state; the old state is restored at the end.
3982 let mut fcx_ps = self.ps.borrow_mut();
3983 let unsafety_state = fcx_ps.recurse(blk);
3984 replace(&mut *fcx_ps, unsafety_state)
3987 let mut warned = false;
3988 let mut any_diverges = false;
3989 let mut any_err = false;
3990 for s in &blk.stmts {
3992 let s_id = s.node.id();
3993 let s_ty = self.node_ty(s_id);
// Warn once about code following a diverging statement.
3994 if any_diverges && !warned && match s.node {
3995 hir::StmtDecl(ref decl, _) => {
3997 hir::DeclLocal(_) => true,
4001 hir::StmtExpr(_, _) | hir::StmtSemi(_, _) => true,
4005 .add_lint(lint::builtin::UNREACHABLE_CODE,
4008 "unreachable statement".to_string());
4011 // FIXME(canndrew): This is_never should probably be an is_uninhabited
4012 any_diverges = any_diverges ||
4013 self.type_var_diverges(s_ty) ||
4015 any_err = any_err || s_ty.references_error();
// No tail expression: block is error, diverging, or `()`.
4018 None => if any_err {
4019 self.write_error(blk.id);
4020 } else if any_diverges {
4021 self.write_ty(blk.id, self.next_diverging_ty_var());
4023 self.write_nil(blk.id);
// Tail expression present but everything before it diverged.
4026 if any_diverges && !warned {
4029 .add_lint(lint::builtin::UNREACHABLE_CODE,
4032 "unreachable expression".to_string());
4034 let ety = match expected {
4035 ExpectHasType(ety) => {
4036 self.check_expr_coercable_to_type(&e, ety);
4040 self.check_expr_with_expectation(&e, expected);
4046 self.write_error(blk.id);
4047 } else if any_diverges {
4048 self.write_ty(blk.id, self.next_diverging_ty_var());
4050 self.write_ty(blk.id, ety);
// Restore the previously saved unsafety state.
4055 *self.ps.borrow_mut() = prev;
4058 // Instantiates the given path, which must refer to an item with the given
4059 // number of type parameters and type.
// Categorizes the path's segments (type-space vs fn-space parameters),
// validates the provided parameter counts, builds the full `Substs`
// (inferring anything the user omitted), registers the item's bounds as
// obligations, and records the substituted type for `node_id`.
// NOTE(review): some original source lines are missing from this listing
// (e.g. parts of the match over `def`, several closure bodies, and closing
// braces) — comments describe only what the visible lines establish.
4060 pub fn instantiate_value_path(&self,
4061 segments: &[hir::PathSegment],
4062 opt_self_ty: Option<Ty<'tcx>>,
4065 node_id: ast::NodeId)
4067 debug!("instantiate_value_path(path={:?}, def={:?}, node_id={})",
4072 // We need to extract the type parameters supplied by the user in
4073 // the path `path`. Due to the current setup, this is a bit of a
4074 // tricky-process; the problem is that resolve only tells us the
4075 // end-point of the path resolution, and not the intermediate steps.
4076 // Luckily, we can (at least for now) deduce the intermediate steps
4077 // just from the end-point.
4079 // There are basically four cases to consider:
4081 // 1. Reference to a *type*, such as a struct or enum:
4083 //        mod a { struct Foo<T> { ... } }
4085 // Because we don't allow types to be declared within one
4086 // another, a path that leads to a type will always look like
4087 // `a::b::Foo<T>` where `a` and `b` are modules. This implies
4088 // that only the final segment can have type parameters, and
4089 // they are located in the TypeSpace.
4091 // *Note:* Generally speaking, references to types don't
4092 // actually pass through this function, but rather the
4093 // `ast_ty_to_ty` function in `astconv`. However, in the case
4094 // of struct patterns (and maybe literals) we do invoke
4095 // `instantiate_value_path` to get the general type of an instance of
4096 // a struct. (In these cases, there are actually no type
4097 // parameters permitted at present, but perhaps we will allow
4098 // them in the future.)
4100 // 1b. Reference to an enum variant or tuple-like struct:
4102 //        struct foo<T>(...)
4103 //        enum E<T> { foo(...) }
4105 // In these cases, the parameters are declared in the type
4108 // 2. Reference to a *fn item*:
4112 // In this case, the path will again always have the form
4113 // `a::b::foo::<T>` where only the final segment should have
4114 // type parameters. However, in this case, those parameters are
4115 // declared on a value, and hence are in the `FnSpace`.
4117 // 3. Reference to a *method*:
4119 //        impl<A> SomeStruct<A> {
4123 // Here we can have a path like
4124 // `a::b::SomeStruct::<A>::foo::<B>`, in which case parameters
4125 // may appear in two places. The penultimate segment,
4126 // `SomeStruct::<A>`, contains parameters in TypeSpace, and the
4127 // final segment, `foo::<B>` contains parameters in fn space.
4129 // 4. Reference to an *associated const*:
4131 // impl<A> AnotherStruct<A> {
4132 // const FOO: B = BAR;
4135 // The path in this case will look like
4136 // `a::b::AnotherStruct::<A>::FOO`, so the penultimate segment
4137 // only will have parameters in TypeSpace.
4139 // The first step then is to categorize the segments appropriately.
4141 assert!(!segments.is_empty());
4143 let mut ufcs_associated = None;
4144 let mut type_segment = None;
4145 let mut fn_segment = None;
4147 // Case 1 and 1b. Reference to a *type* or *enum variant*.
4148 Def::Struct(def_id) |
4149 Def::Variant(_, def_id) |
4151 Def::TyAlias(def_id) |
4152 Def::AssociatedTy(_, def_id) |
4153 Def::Trait(def_id) => {
4154 // Everything but the final segment should have no
4155 // parameters at all.
4156 let mut generics = self.tcx.lookup_generics(def_id);
4157 if let Some(def_id) = generics.parent {
4158 // Variant and struct constructors use the
4159 // generics of their parent type definition.
4160 generics = self.tcx.lookup_generics(def_id);
4162 type_segment = Some((segments.last().unwrap(), generics));
4165 // Case 2. Reference to a top-level value.
4167 Def::Const(def_id) |
4168 Def::Static(def_id, _) => {
4169 fn_segment = Some((segments.last().unwrap(),
4170 self.tcx.lookup_generics(def_id)));
4173 // Case 3. Reference to a method or associated const.
4174 Def::Method(def_id) |
4175 Def::AssociatedConst(def_id) => {
4176 let container = self.tcx.impl_or_trait_item(def_id).container();
4178 ty::TraitContainer(trait_did) => {
4179 callee::check_legal_trait_for_method_call(self.ccx, span, trait_did)
4181 ty::ImplContainer(_) => {}
4184 let generics = self.tcx.lookup_generics(def_id);
4185 if segments.len() >= 2 {
// Fully-written path: penultimate segment carries the type's params.
4186 let parent_generics = self.tcx.lookup_generics(generics.parent.unwrap());
4187 type_segment = Some((&segments[segments.len() - 2], parent_generics));
4189 // `<T>::assoc` will end up here, and so can `T::assoc`.
4190 let self_ty = opt_self_ty.expect("UFCS sugared assoc missing Self");
4191 ufcs_associated = Some((container, self_ty));
4193 fn_segment = Some((segments.last().unwrap(), generics));
4196 // Other cases. Various nonsense that really shouldn't show up
4197 // here. If they do, an error will have been reported
4198 // elsewhere. (I hope)
4200 Def::ForeignMod(..) |
4210 // In `<T as Trait<A, B>>::method`, `A` and `B` are mandatory, but
4211 // `opt_self_ty` can also be Some for `Foo::method`, where Foo's
4212 // type parameters are not mandatory.
4213 let require_type_space = opt_self_ty.is_some() && ufcs_associated.is_none();
4215 debug!("type_segment={:?} fn_segment={:?}", type_segment, fn_segment);
4217 // Now that we have categorized what space the parameters for each
4218 // segment belong to, let's sort out the parameters that the user
4219 // provided (if any) into their appropriate spaces. We'll also report
4220 // errors if type parameters are provided in an inappropriate place.
4221 let poly_segments = type_segment.is_some() as usize +
4222 fn_segment.is_some() as usize;
4223 self.tcx.prohibit_type_params(&segments[..segments.len() - poly_segments]);
// Locals and upvars: type is already known; empty substs, early return.
4226 Def::Local(_, nid) | Def::Upvar(_, nid, _, _) => {
4227 let ty = self.local_ty(span, nid);
4228 let ty = self.normalize_associated_types_in(span, &ty);
4229 self.write_ty(node_id, ty);
4230 self.write_substs(node_id, ty::ItemSubsts {
4231 substs: Substs::empty(self.tcx)
4238 // Now we have to compare the types that the user *actually*
4239 // provided against the types that were *expected*. If the user
4240 // did not provide any types, then we want to substitute inference
4241 // variables. If the user provided some types, we may still need
4242 // to add defaults. If the user provided *too many* types, that's
4244 self.check_path_parameter_count(span, !require_type_space, &mut type_segment);
4245 self.check_path_parameter_count(span, true, &mut fn_segment);
// Build the substitution: each region/type parameter is taken from the
// corresponding AST segment when supplied, otherwise inferred (or, for
// types, defaulted when a default exists and omission is allowed).
4247 let substs = Substs::for_item(self.tcx, def.def_id(), |def, _| {
4248 let mut i = def.index as usize;
4249 let type_regions = match (type_segment, fn_segment) {
4250 (_, Some((_, generics))) => generics.parent_regions as usize,
4251 (Some((_, generics)), None) => generics.regions.len(),
4255 let segment = if i < type_regions {
4261 let lifetimes = match segment.map(|(s, _)| &s.parameters) {
4262 Some(&hir::AngleBracketedParameters(ref data)) => &data.lifetimes[..],
4263 Some(&hir::ParenthesizedParameters(_)) => bug!(),
4267 if let Some(ast_lifetime) = lifetimes.get(i) {
4268 ast_region_to_region(self.tcx, ast_lifetime)
4270 self.region_var_for_def(span, def)
4273 let mut i = def.index as usize;
4274 let (type_types, has_self) = match (type_segment, fn_segment) {
4275 (_, Some((_, generics))) => {
4276 (generics.parent_types as usize, generics.has_self)
4278 (Some((_, generics)), None) => {
4279 (generics.types.len(), generics.has_self)
4281 (None, None) => (0, false)
4284 let can_omit = i >= type_types || !require_type_space;
4285 let segment = if i < type_types {
4286 // Handle Self first, so we can adjust the index to match the AST.
4287 if has_self && i == 0 {
4288 return opt_self_ty.unwrap_or_else(|| {
4289 self.type_var_for_def(span, def, substs)
4292 i -= has_self as usize;
4298 let types = match segment.map(|(s, _)| &s.parameters) {
4299 Some(&hir::AngleBracketedParameters(ref data)) => &data.types[..],
4300 Some(&hir::ParenthesizedParameters(_)) => bug!(),
4304 let omitted = can_omit && types.is_empty();
4305 if let Some(ast_ty) = types.get(i) {
4306 // A provided type parameter.
4308 } else if let (false, Some(default)) = (omitted, def.default) {
4309 // No type parameter provided, but a default exists.
4310 default.subst_spanned(self.tcx, substs, Some(span))
4312 // No type parameters were provided, we can infer all.
4313 // This can also be reached in some error cases:
4314 // We prefer to use inference variables instead of
4315 // TyError to let type inference recover somewhat.
4316 self.type_var_for_def(span, def, substs)
4320 // The things we are substituting into the type should not contain
4321 // escaping late-bound regions, and nor should the base type scheme.
4322 let scheme = self.tcx.lookup_item_type(def.def_id());
4323 let type_predicates = self.tcx.lookup_predicates(def.def_id());
4324 assert!(!substs.has_escaping_regions());
4325 assert!(!scheme.ty.has_escaping_regions());
4327 // Add all the obligations that are required, substituting and
4328 // normalizing appropriately.
4329 let bounds = self.instantiate_bounds(span, &substs, &type_predicates);
4330 self.add_obligations_for_parameters(
4331 traits::ObligationCause::new(span, self.body_id, traits::ItemObligation(def.def_id())),
4334 // Substitute the values for the type parameters into the type of
4335 // the referenced item.
4336 let ty_substituted = self.instantiate_type_scheme(span, &substs, &scheme.ty);
4339 if let Some((ty::ImplContainer(impl_def_id), self_ty)) = ufcs_associated {
4340 // In the case of `Foo<T>::method` and `<Foo<T>>::method`, if `method`
4341 // is inherent, there is no `Self` parameter, instead, the impl needs
4342 // type parameters, which we can infer by unifying the provided `Self`
4343 // with the substituted impl type.
4344 let impl_scheme = self.tcx.lookup_item_type(impl_def_id);
4346 let impl_ty = self.instantiate_type_scheme(span, &substs, &impl_scheme.ty);
4347 match self.sub_types(false, TypeOrigin::Misc(span), self_ty, impl_ty) {
4348 Ok(InferOk { obligations, .. }) => {
4349 // FIXME(#32730) propagate obligations
4350 assert!(obligations.is_empty());
4354 "instantiate_value_path: (UFCS) {:?} was a subtype of {:?} but now is not?",
4361 debug!("instantiate_value_path: type of {:?} is {:?}",
4364 self.write_ty(node_id, ty_substituted);
4365 self.write_substs(node_id, ty::ItemSubsts {
4371 /// Report errors if the provided parameters are too few or too many.
///
/// Validates the explicitly-written parameters of one path segment
/// (`<'a, T, Item = U>`) against the definition's generics:
///  - lifetimes: E0088 when too many are given; E0090 when some but not
///    all are given (omitting every lifetime is ordinary elision);
///  - type parameters: E0087 when too many; E0089 when fewer than the
///    number of non-defaulted parameters;
///  - associated-type bindings: E0182 — never legal in expression paths.
4372 fn check_path_parameter_count(&self,
4375 segment: &mut Option<(&hir::PathSegment, &ty::Generics)>) {
// Split the written parameters into their three classes. Parenthesized
// sugar (`Fn(A) -> B`) cannot occur in an expression path, hence the bug
// assertion; a missing segment means nothing was written explicitly.
4376 let (lifetimes, types, bindings) = match segment.map(|(s, _)| &s.parameters) {
4377 Some(&hir::AngleBracketedParameters(ref data)) => {
4378 (&data.lifetimes[..], &data.types[..], &data.bindings[..])
4380 Some(&hir::ParenthesizedParameters(_)) => {
4381 span_bug!(span, "parenthesized parameters cannot appear in ExprPath");
4383 None => (&[][..], &[][..], &[][..])
// Small helper: pluralizes "N parameter(s)" for the error messages below.
4387 format!("{} parameter{}", n, if n == 1 { "" } else { "s" })
4390 // Check provided lifetime parameters.
4391 let lifetime_defs = segment.map_or(&[][..], |(_, generics)| &generics.regions);
4392 if lifetimes.len() > lifetime_defs.len() {
// Point the error at the first surplus lifetime argument.
4393 let span = lifetimes[lifetime_defs.len()].span;
4394 span_err!(self.tcx.sess, span, E0088,
4395 "too many lifetime parameters provided: \
4396 expected {}, found {}",
4397 count(lifetime_defs.len()),
4398 count(lifetimes.len()));
// `lifetimes.len() > 0` exempts full omission: writing no lifetimes at
// all is elision, not an arity error.
4399 } else if lifetimes.len() > 0 && lifetimes.len() < lifetime_defs.len() {
4400 span_err!(self.tcx.sess, span, E0090,
4401 "too few lifetime parameters provided: \
4402 expected {}, found {}",
4403 count(lifetime_defs.len()),
4404 count(lifetimes.len()));
4407 // Check provided type parameters.
// For a definition without a parent (a free item or trait), skip the
// implicit `Self` parameter when counting the writable type parameters.
4408 let type_defs = segment.map_or(&[][..], |(_, generics)| {
4409 if generics.parent.is_none() {
4410 &generics.types[generics.has_self as usize..]
// The minimum the user must write: the leading run of parameters that
// carry no default (defaulted ones may be omitted).
4415 let required_len = type_defs.iter()
4416 .take_while(|d| d.default.is_none())
4418 if types.len() > type_defs.len() {
// Point at the first surplus type argument.
4419 let span = types[type_defs.len()].span;
4420 struct_span_err!(self.tcx.sess, span, E0087,
4421 "too many type parameters provided: \
4422 expected at most {}, found {}",
4423 count(type_defs.len()),
4425 .span_label(span, &format!("too many type parameters")).emit();
4427 // To prevent derived errors to accumulate due to extra
4428 // type parameters, we force instantiate_value_path to
4429 // use inference variables instead of the provided types.
// NOTE(review): `can_omit` is defined on an elided line — presumably it
// allows writing zero type parameters so they are all inferred; confirm
// against the full source.
4431 } else if !(can_omit && types.len() == 0) && types.len() < required_len {
// Mention "at least" only when trailing defaults make the count a range.
4433 if type_defs.len() != required_len { "at least " } else { "" };
4434 span_err!(self.tcx.sess, span, E0089,
4435 "too few type parameters provided: \
4436 expected {}{}, found {}",
4438 count(required_len),
4439 count(types.len()));
// Associated-type bindings (`Item = U`) belong in type paths only.
4442 if !bindings.is_empty() {
4443 span_err!(self.tcx.sess, bindings[0].span, E0182,
4444 "unexpected binding of associated item in expression path \
4445 (only allowed in type paths)");
// Resolve `ty` as far as current inference obligations allow; if it is
// still an unresolved type variable, fall back to the caller-supplied
// `f()`. When the fallback is itself a variable or an error type, report
// "type must be known in this context" (suppressed if errors were already
// emitted) and force the type to `err` so later passes don't cascade;
// otherwise unify `ty` with the fallback via subtyping.
4449 fn structurally_resolve_type_or_else<F>(&self, sp: Span, ty: Ty<'tcx>, f: F)
4451 where F: Fn() -> Ty<'tcx>
4453 let mut ty = self.resolve_type_vars_with_obligations(ty);
// (Elided guard above: this path is taken when `ty` is still a type
// variable after resolution — TODO confirm against the full source.)
4456 let alternative = f();
4459 if alternative.is_ty_var() || alternative.references_error() {
// Only complain once; stay quiet if prior errors already tainted inference.
4460 if !self.is_tainted_by_errors() {
4461 self.type_error_message(sp, |_actual| {
4462 "the type of this value must be known in this context".to_string()
// Record `err` as a supertype of `ty` and return `err` to stop cascades.
4465 self.demand_suptype(sp, self.tcx.types.err, ty);
4466 ty = self.tcx.types.err;
// Fallback produced a usable concrete type: unify with it.
4468 self.demand_suptype(sp, alternative, ty);
4476 // Resolves `typ` by a single level if `typ` is a type variable. If no
4477 // resolution is possible, then an error is reported.
// Thin wrapper over `structurally_resolve_type_or_else`; the fallback
// closure's body is on elided lines (presumably it yields
// `self.tcx.types.err`, triggering the error-reporting path — confirm
// against the full source).
4478 pub fn structurally_resolved_type(&self, sp: Span, ty: Ty<'tcx>) -> Ty<'tcx> {
4479 self.structurally_resolve_type_or_else(sp, ty, || {
4485 // Returns true if b contains a break that can exit from b
// Combines two structural queries over the block (the combinator between
// them is on an elided line — presumably `||`; confirm against the full
// source):
4486 pub fn may_break(tcx: TyCtxt, id: ast::NodeId, b: &hir::Block) -> bool {
4487 // First: is there an unlabeled break immediately
// inside the loop? `loop_query` matches `break;` with no label.
4489 (loop_query(&b, |e| {
4491 hir::ExprBreak(None) => true,
4495 // Second: is there a labeled break with label
4496 // <id> nested anywhere inside the loop?
// A labeled break counts only if its label resolves to this loop's `id`.
4497 (block_query(b, |e| {
4498 if let hir::ExprBreak(Some(_)) = e.node {
4499 tcx.expect_def(e.id) == Def::Label(id)
4506 pub fn check_bounds_are_used<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
4507 tps: &[hir::TyParam],
4509 debug!("check_bounds_are_used(n_tps={}, ty={:?})",
4512 // make a vector of booleans initially false, set to true when used
4513 if tps.is_empty() { return; }
4514 let mut tps_used = vec![false; tps.len()];
4516 for leaf_ty in ty.walk() {
4517 if let ty::TyParam(ParamTy {idx, ..}) = leaf_ty.sty {
4518 debug!("Found use of ty param num {}", idx);
4519 tps_used[idx as usize] = true;
4523 for (i, b) in tps_used.iter().enumerate() {
4525 struct_span_err!(ccx.tcx.sess, tps[i].span, E0091,
4526 "type parameter `{}` is unused",
4528 .span_label(tps[i].span, &format!("unused type parameter"))