1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
15 Within the check phase of type check, we check each item one at a time
16 (bodies of function expressions are checked as part of the containing
17 function). Inference is used to supply types wherever they are
20 By far the most complex case is checking the body of a function. This
21 can be broken down into several distinct phases:
23 - gather: creates type variables to represent the type of each local
24 variable and pattern binding.
26 - main: the main pass does the lion's share of the work: it
27 determines the types of all expressions, resolves
28 methods, checks for most invalid conditions, and so forth. In
29 some cases, where a type is unknown, it may create a type or region
30 variable and use that as the type of an expression.
32 In the process of checking, various constraints will be placed on
33 these type variables through the subtyping relationships requested
34 through the `demand` module. The `infer` module is in charge
35 of resolving those constraints.
37 - regionck: after main is complete, the regionck pass goes over all
38 types looking for regions and making sure that they did not escape
39 into places they are not in scope. This may also influence the
40 final assignments of the various region variables if there is some
43 - vtable: find and records the impls to use for each trait bound that
44 appears on a type parameter.
46 - writeback: writes the final types within a function body, replacing
47 type variables with their final inferred types. These final types
48 are written into the `tcx.node_types` table, which should *never* contain
49 any reference to a type variable.
53 While type checking a function, the intermediate types for the
54 expressions, blocks, and so forth contained within the function are
55 stored in `fcx.node_types` and `fcx.item_substs`. These types
56 may contain unresolved type variables. After type checking is
57 complete, the functions in the writeback module are used to take the
58 types from this table, resolve them, and then write them into their
59 permanent home in the type context `ccx.tcx`.
61 This means that during inferencing you should use `fcx.write_ty()`
62 and `fcx.expr_ty()` / `fcx.node_ty()` to write/obtain the types of
63 nodes within the function.
65 The types of top-level items, which never contain unbound type
66 variables, are stored directly into the `tcx` tables.
68 n.b.: A type variable is not the same thing as a type parameter. A
69 type variable is rather an "instance" of a type parameter: that is,
70 given a generic function `fn foo<T>(t: T)`: while checking the
71 function `foo`, the type `ty_param(0)` refers to the type `T`, which
72 is treated in abstract. When `foo()` is called, however, `T` will be
73 substituted for a fresh type variable `N`. This variable will
74 eventually be resolved to some concrete type (which might itself be
79 pub use self::Expectation::*;
80 pub use self::compare_method::{compare_impl_method, compare_const_impl};
81 use self::TupleArgumentsFlag::*;
83 use astconv::{self, ast_region_to_region, ast_ty_to_ty, AstConv, PathParamMode};
84 use check::_match::pat_ctxt;
85 use fmt_macros::{Parser, Piece, Position};
86 use middle::astconv_util::prohibit_type_params;
88 use middle::def_id::{DefId, LOCAL_CRATE};
90 use middle::infer::type_variable;
91 use middle::pat_util::{self, pat_id_map};
92 use middle::privacy::{AllPublic, LastMod};
93 use middle::subst::{self, Subst, Substs, VecPerParamSpace, ParamSpace, TypeSpace};
94 use middle::traits::{self, report_fulfillment_errors};
95 use middle::ty::{FnSig, GenericPredicates, TypeScheme};
96 use middle::ty::{Disr, ParamTy, ParameterEnvironment};
97 use middle::ty::{LvaluePreference, NoPreference, PreferMutLvalue};
98 use middle::ty::{self, HasTypeFlags, RegionEscape, ToPolyTraitRef, Ty};
99 use middle::ty::{MethodCall, MethodCallee};
100 use middle::ty::adjustment;
101 use middle::ty::error::TypeError;
102 use middle::ty::fold::{TypeFolder, TypeFoldable};
103 use middle::ty::util::Representability;
104 use require_c_abi_if_variadic;
105 use rscope::{ElisionFailureInfo, RegionScope};
106 use session::Session;
107 use {CrateCtxt, lookup_full_def};
110 use util::common::{block_query, ErrorReported, indenter, loop_query};
111 use util::nodemap::{DefIdMap, FnvHashMap, NodeMap};
112 use util::lev_distance::lev_distance;
114 use std::cell::{Cell, Ref, RefCell};
115 use std::collections::{HashSet};
116 use std::mem::replace;
120 use syntax::codemap::{self, Span};
121 use syntax::owned_slice::OwnedSlice;
122 use syntax::parse::token::{self, InternedString};
125 use rustc_front::visit::{self, Visitor};
126 use rustc_front::hir;
127 use rustc_front::hir::Visibility;
128 use rustc_front::attr;
129 use rustc_front::attr::AttrMetaMethods;
130 use rustc_front::hir::{Item, ItemImpl};
131 use rustc_front::print::pprust;
151 /// closures defined within the function. For example:
154 /// bar(move|| { ... })
157 /// Here, the function `foo()` and the closure passed to
158 /// `bar()` will each have their own `FnCtxt`, but they will
159 /// share the inherited fields.
160 pub struct Inherited<'a, 'tcx: 'a> {
// The inference context shared by the outer fn and all closures within it;
// type variables created anywhere in the fn live here.
161 infcx: infer::InferCtxt<'a, 'tcx>,
// Types assigned to local variables and pattern bindings, keyed by node id.
162 locals: RefCell<NodeMap<Ty<'tcx>>>,
// Side tables (node types, adjustments, etc.) shared with the infcx.
164 tables: &'a RefCell<ty::Tables<'tcx>>,
166 // A mapping from each fn's id to its signature, with all bound
167 // regions replaced with free ones. Unlike the other tables, this
168 // one is never copied into the tcx: it is only used by regionck.
169 fn_sig_map: RefCell<NodeMap<Vec<Ty<'tcx>>>>,
171 // When we process a call like `c()` where `c` is a closure type,
172 // we may not have decided yet whether `c` is a `Fn`, `FnMut`, or
173 // `FnOnce` closure. In that case, we defer full resolution of the
174 // call until upvar inference can kick in and make the
175 // decision. We keep these deferred resolutions grouped by the
176 // def-id of the closure, so that once we decide, we can easily go
177 // back and process them.
178 deferred_call_resolutions: RefCell<DefIdMap<Vec<DeferredCallResolutionHandler<'tcx>>>>,
// Casts whose checking must wait until more type information is
// available; drained after obligations are selected (see check_bare_fn).
180 deferred_cast_checks: RefCell<Vec<cast::CastCheck<'tcx>>>,
// A deferred overloaded-call resolution: once closure kinds are known,
// `resolve` is invoked with the fn's context to finish checking the call.
183 trait DeferredCallResolution<'tcx> {
184 fn resolve<'a>(&mut self, fcx: &FnCtxt<'a,'tcx>);
187 type DeferredCallResolutionHandler<'tcx> = Box<DeferredCallResolution<'tcx>+'tcx>;
189 /// When type-checking an expression, we propagate downward
190 /// whatever type hint we are able in the form of an `Expectation`.
191 #[derive(Copy, Clone, Debug)]
192 pub enum Expectation<'tcx> {
193 /// We know nothing about what type this expression should have.
196 /// This expression should have the type given (or some subtype)
197 ExpectHasType(Ty<'tcx>),
199 /// This expression will be cast to the `Ty`
200 ExpectCastableToType(Ty<'tcx>),
202 /// This rvalue expression will be wrapped in `&` or `Box` and coerced
203 /// to `&Ty` or `Box<Ty>`, respectively. `Ty` is `[A]` or `Trait`.
204 ExpectRvalueLikeUnsized(Ty<'tcx>),
207 impl<'tcx> Expectation<'tcx> {
208 // Disregard "castable to" expectations because they
209 // can lead us astray. Consider for example `if cond
210 // {22} else {c} as u8` -- if we propagate the
211 // "castable to u8" constraint to 22, it will pick the
212 // type 22u8, which is overly constrained (c might not
213 // be a u8). In effect, the problem is that the
214 // "castable to" expectation is not the tightest thing
215 // we can say, so we want to drop it in this case.
216 // The tightest thing we can say is "must unify with
217 // else branch". Note that in the case of a "has type"
218 // constraint, this limitation does not hold.
220 // If the expected type is just a type variable, then don't use
221 // an expected type. Otherwise, we might write parts of the type
222 // when checking the 'then' block which are incompatible with the
// Weakens the expectation before it is applied to the branches of an
// `if`/`match`, per the rules in the comments above.
224 fn adjust_for_branches<'a>(&self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> {
226 ExpectHasType(ety) => {
// Resolve just the top level of the type so we can tell whether it
// is still an unbound type variable.
227 let ety = fcx.infcx().shallow_resolve(ety);
228 if !ety.is_ty_var() {
// Rvalue-like-unsized hints are safe to propagate unchanged.
234 ExpectRvalueLikeUnsized(ety) => {
235 ExpectRvalueLikeUnsized(ety)
// Tracks whether the code currently being checked is inside an `unsafe`
// context, and which node introduced that context.
242 #[derive(Copy, Clone)]
243 pub struct UnsafetyState {
// Node id of the fn or block that established the current unsafety.
244 pub def: ast::NodeId,
// Current unsafety level (safe vs. unsafe).
245 pub unsafety: hir::Unsafety,
// Nesting depth of compiler-generated push-unsafe blocks; see `recurse`.
246 pub unsafe_push_count: u32,
// Constructs the initial unsafety state for a function body: the given
// unsafety, attributed to the fn's node id, with no pushed unsafe blocks.
251 pub fn function(unsafety: hir::Unsafety, def: ast::NodeId) -> UnsafetyState {
252 UnsafetyState { def: def, unsafety: unsafety, unsafe_push_count: 0, from_fn: true }
// Computes the unsafety state for a nested block, based on the block's
// rules (unsafe / push-unsafe / pop-unsafe) and the current state.
255 pub fn recurse(&mut self, blk: &hir::Block) -> UnsafetyState {
256 match self.unsafety {
257 // If this unsafe, then if the outer function was already marked as
258 // unsafe we shouldn't attribute the unsafe'ness to the block. This
259 // way the block can be warned about instead of ignoring this
260 // extraneous block (functions are never warned about).
261 hir::Unsafety::Unsafe if self.from_fn => *self,
264 let (unsafety, def, count) = match blk.rules {
265 hir::PushUnsafeBlock(..) =>
// checked_add/checked_sub: a push/pop imbalance is a compiler bug,
// so overflow/underflow panics rather than wrapping silently.
266 (unsafety, blk.id, self.unsafe_push_count.checked_add(1).unwrap()),
267 hir::PopUnsafeBlock(..) =>
268 (unsafety, blk.id, self.unsafe_push_count.checked_sub(1).unwrap()),
269 hir::UnsafeBlock(..) =>
270 (hir::Unsafety::Unsafe, blk.id, self.unsafe_push_count),
272 (unsafety, self.def, self.unsafe_push_count),
274 UnsafetyState{ def: def,
276 unsafe_push_count: count,
// Per-function type-checking context; one per fn or closure body, sharing
// the `Inherited` fields with any enclosing fn.
284 pub struct FnCtxt<'a, 'tcx: 'a> {
// Node id of the body being checked.
285 body_id: ast::NodeId,
287 // This flag is set to true if, during the writeback phase, we encounter
288 // a type error in this function.
289 writeback_errors: Cell<bool>,
291 // Number of errors that had been reported when we started
292 // checking this function. On exit, if we find that *more* errors
293 // have been reported, we will skip regionck and other work that
294 // expects the types within the function to be consistent.
295 err_count_on_creation: usize,
// Declared return type of the function (converging or diverging).
297 ret_ty: ty::FnOutput<'tcx>,
// Current unsafety state; updated as blocks are entered (see UnsafetyState).
299 ps: RefCell<UnsafetyState>,
// Fields shared with enclosing/nested function contexts.
301 inh: &'a Inherited<'a, 'tcx>,
// Crate-wide type-checking context.
303 ccx: &'a CrateCtxt<'a, 'tcx>,
306 impl<'a, 'tcx> Inherited<'a, 'tcx> {
// Creates a fresh `Inherited` with a new inference context over the given
// tables and parameter environment, and empty per-fn side tables.
307 fn new(tcx: &'a ty::ctxt<'tcx>,
308 tables: &'a RefCell<ty::Tables<'tcx>>,
309 param_env: ty::ParameterEnvironment<'a, 'tcx>)
310 -> Inherited<'a, 'tcx> {
313 infcx: infer::new_infer_ctxt(tcx, tables, Some(param_env), true),
314 locals: RefCell::new(NodeMap()),
316 fn_sig_map: RefCell::new(NodeMap()),
317 deferred_call_resolutions: RefCell::new(DefIdMap()),
318 deferred_cast_checks: RefCell::new(Vec::new()),
// Normalizes associated-type projections in `T`, registering any
// resulting obligations with this context's fulfillment context.
322 fn normalize_associated_types_in<T>(&self,
324 body_id: ast::NodeId,
327 where T : TypeFoldable<'tcx> + HasTypeFlags
329 let mut fulfillment_cx = self.infcx.fulfillment_cx.borrow_mut();
330 assoc::normalize_associated_types_in(&self.infcx,
339 // Used by check_const and check_enum_variants
// Builds a minimal FnCtxt (no real fn signature) for checking constant
// expressions; the "return type" is the type the constant must have.
340 pub fn blank_fn_ctxt<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
341 inh: &'a Inherited<'a, 'tcx>,
342 rty: ty::FnOutput<'tcx>,
343 body_id: ast::NodeId)
344 -> FnCtxt<'a, 'tcx> {
347 writeback_errors: Cell::new(false),
348 err_count_on_creation: ccx.tcx.sess.err_count(),
// Constants are always checked in a safe context rooted at node 0.
350 ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal, 0)),
// Builds an `Inherited` suitable for checking items outside any fn body
// (statics, consts), using an empty parameter environment.
356 fn static_inherited_fields<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
357 tables: &'a RefCell<ty::Tables<'tcx>>)
358 -> Inherited<'a, 'tcx> {
359 // It's kind of a kludge to manufacture a fake function context
360 // and statement context, but we might as well write the code only once
361 let param_env = ccx.tcx.empty_parameter_environment();
362 Inherited::new(ccx.tcx, &tables, param_env)
365 struct CheckItemTypesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
366 struct CheckItemBodiesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
368 impl<'a, 'tcx> Visitor<'tcx> for CheckItemTypesVisitor<'a, 'tcx> {
// Check each item's type-level properties, then recurse into nested items.
369 fn visit_item(&mut self, i: &'tcx hir::Item) {
370 check_item_type(self.ccx, i);
371 visit::walk_item(self, i);
// Fixed-length array types carry a length expression that must itself be
// type-checked as a `usize` constant.
374 fn visit_ty(&mut self, t: &'tcx hir::Ty) {
376 hir::TyFixedLengthVec(_, ref expr) => {
377 check_const_in_type(self.ccx, &**expr, self.ccx.tcx.types.usize);
382 visit::walk_ty(self, t);
386 impl<'a, 'tcx> Visitor<'tcx> for CheckItemBodiesVisitor<'a, 'tcx> {
// Check each item's bodies (fn bodies, const initializers), then recurse.
387 fn visit_item(&mut self, i: &'tcx hir::Item) {
388 check_item_body(self.ccx, i);
389 visit::walk_item(self, i);
// Runs the legacy well-formedness checker over the whole crate and aborts
// compilation if it reported any errors.
393 pub fn check_wf_old(ccx: &CrateCtxt) {
394 // FIXME(#25759). The new code below is much more reliable but (for now)
395 // only generates warnings. So as to ensure that we continue
396 // getting errors where we used to get errors, we run the old wf
397 // code first and abort if it encounters any errors. If no abort
398 // comes, we run the new code and issue warnings.
399 let krate = ccx.tcx.map.krate();
400 let mut visit = wf::CheckTypeWellFormedVisitor::new(ccx);
401 visit::walk_crate(&mut visit, krate);
403 // If types are not well-formed, it leads to all manner of errors
404 // downstream, so stop reporting errors at this point.
405 ccx.tcx.sess.abort_if_errors();
// Runs the newer well-formedness checker (wfcheck) over the whole crate;
// see the FIXME in `check_wf_old` for why both passes exist.
408 pub fn check_wf_new(ccx: &CrateCtxt) {
409 let krate = ccx.tcx.map.krate();
410 let mut visit = wfcheck::CheckTypeWellFormedVisitor::new(ccx);
411 visit::walk_crate(&mut visit, krate);
413 // If types are not well-formed, it leads to all manner of errors
414 // downstream, so stop reporting errors at this point.
415 ccx.tcx.sess.abort_if_errors();
// Walks the crate checking type-level properties of every item
// (signatures, variants, bounds); aborts if any errors were reported.
418 pub fn check_item_types(ccx: &CrateCtxt) {
419 let krate = ccx.tcx.map.krate();
420 let mut visit = CheckItemTypesVisitor { ccx: ccx };
421 visit::walk_crate(&mut visit, krate);
422 ccx.tcx.sess.abort_if_errors();
// Walks the crate type-checking every item body (fn bodies, const
// initializers); aborts if any errors were reported.
425 pub fn check_item_bodies(ccx: &CrateCtxt) {
426 let krate = ccx.tcx.map.krate();
427 let mut visit = CheckItemBodiesVisitor { ccx: ccx };
428 visit::walk_crate(&mut visit, krate);
430 ccx.tcx.sess.abort_if_errors();
// Validates every local `Drop` impl via dropck; aborts if any check failed.
433 pub fn check_drop_impls(ccx: &CrateCtxt) {
434 for drop_method_did in ccx.tcx.destructors.borrow().iter() {
// Only impls defined in this crate are checked here.
435 if drop_method_did.is_local() {
436 let drop_impl_did = ccx.tcx.map.get_parent_did(drop_method_did.node);
437 match dropck::check_drop_impl(ccx.tcx, drop_impl_did) {
// An Err from dropck must already have emitted a diagnostic.
440 assert!(ccx.tcx.sess.has_errors());
446 ccx.tcx.sess.abort_if_errors();
// Type-checks a free fn or method body end-to-end: liberates the signature's
// late-bound regions, runs check_fn, then upvar inference, obligation
// selection, regionck and writeback. Anything other than a bare-fn type
// here is a compiler bug.
449 fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
450 decl: &'tcx hir::FnDecl,
451 body: &'tcx hir::Block,
455 param_env: ty::ParameterEnvironment<'a, 'tcx>)
458 ty::TyBareFn(_, ref fn_ty) => {
459 let tables = RefCell::new(ty::Tables::empty());
460 let inh = Inherited::new(ccx.tcx, &tables, param_env);
462 // Compute the fty from point of view of inside fn.
463 let fn_scope = ccx.tcx.region_maps.item_extent(body.id);
465 fn_ty.sig.subst(ccx.tcx, &inh.infcx.parameter_environment.free_substs);
467 ccx.tcx.liberate_late_bound_regions(fn_scope, &fn_sig);
469 inh.normalize_associated_types_in(body.span,
473 let fcx = check_fn(ccx, fn_ty.unsafety, fn_id, &fn_sig,
474 decl, fn_id, body, &inh);
// Closure kind decisions need defaults applied before upvar analysis.
476 fcx.select_all_obligations_and_apply_defaults();
477 upvar::closure_analyze_fn(&fcx, fn_id, decl, body);
478 fcx.select_obligations_where_possible();
480 fcx.select_all_obligations_or_error(); // Casts can introduce new obligations.
482 regionck::regionck_fn(&fcx, fn_id, fn_span, decl, body);
// Replace inference variables with their final types in the tcx tables.
483 writeback::resolve_type_vars_in_fn(&fcx, decl, body);
485 _ => ccx.tcx.sess.impossible_case(body.span,
486 "check_bare_fn: function type expected")
// Visitor for the "gather" phase: assigns an initial type (declared or a
// fresh type variable) to every local and pattern binding in a fn body.
490 struct GatherLocalsVisitor<'a, 'tcx: 'a> {
491 fcx: &'a FnCtxt<'a, 'tcx>
494 impl<'a, 'tcx> GatherLocalsVisitor<'a, 'tcx> {
// Records a type for node `nid` in the locals table: the user-specified
// type if given, otherwise a fresh inference variable. Returns that type.
495 fn assign(&mut self, _span: Span, nid: ast::NodeId, ty_opt: Option<Ty<'tcx>>) -> Ty<'tcx> {
498 // infer the variable's type
499 let var_ty = self.fcx.infcx().next_ty_var();
500 self.fcx.inh.locals.borrow_mut().insert(nid, var_ty);
504 // take type that the user specified
505 self.fcx.inh.locals.borrow_mut().insert(nid, typ);
512 impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> {
513 // Add explicitly-declared locals.
514 fn visit_local(&mut self, local: &'tcx hir::Local) {
// Convert the written type, if any; otherwise `assign` infers one.
515 let o_ty = match local.ty {
516 Some(ref ty) => Some(self.fcx.to_ty(&**ty)),
519 self.assign(local.span, local.id, o_ty);
520 debug!("Local variable {:?} is assigned type {}",
522 self.fcx.infcx().ty_to_string(
523 self.fcx.inh.locals.borrow().get(&local.id).unwrap().clone()));
524 visit::walk_local(self, local);
527 // Add pattern bindings.
528 fn visit_pat(&mut self, p: &'tcx hir::Pat) {
529 if let hir::PatIdent(_, ref path1, _) = p.node {
// Only identifier patterns that are actually bindings (not unit
// struct / const patterns) introduce a local.
530 if pat_util::pat_is_binding(&self.fcx.ccx.tcx.def_map, p) {
531 let var_ty = self.assign(p.span, p.id, None);
// Bindings must have statically known size.
533 self.fcx.require_type_is_sized(var_ty, p.span,
534 traits::VariableType(p.id));
536 debug!("Pattern binding {} is assigned to {} with type {:?}",
538 self.fcx.infcx().ty_to_string(
539 self.fcx.inh.locals.borrow().get(&p.id).unwrap().clone()),
543 visit::walk_pat(self, p);
546 fn visit_block(&mut self, b: &'tcx hir::Block) {
547 // non-obvious: the `blk` variable maps to region lb, so
548 // we have to keep this up-to-date. This
549 // is... unfortunate. It'd be nice to not need this.
550 visit::walk_block(self, b);
553 // Since an expr occurs as part of the type fixed size arrays we
554 // need to record the type for that node
555 fn visit_ty(&mut self, t: &'tcx hir::Ty) {
557 hir::TyFixedLengthVec(ref ty, ref count_expr) => {
558 self.visit_ty(&**ty);
// Array lengths are `usize` expressions.
559 check_expr_with_hint(self.fcx, &**count_expr, self.fcx.tcx().types.usize);
561 _ => visit::walk_ty(self, t)
565 // Don't descend into fns and items
566 fn visit_fn(&mut self, _: visit::FnKind<'tcx>, _: &'tcx hir::FnDecl,
567 _: &'tcx hir::Block, _: Span, _: ast::NodeId) { }
568 fn visit_item(&mut self, _: &hir::Item) { }
572 /// Helper used by check_bare_fn and check_expr_fn. Does the grungy work of checking a function
573 /// body and returns the function context used for that purpose, since in the case of a fn item
574 /// there is still a bit more to do.
577 /// * inherited: other fields inherited from the enclosing fn (if any)
578 fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
579 unsafety: hir::Unsafety,
580 unsafety_id: ast::NodeId,
581 fn_sig: &ty::FnSig<'tcx>,
582 decl: &'tcx hir::FnDecl,
584 body: &'tcx hir::Block,
585 inherited: &'a Inherited<'a, 'tcx>)
589 let err_count_on_creation = tcx.sess.err_count();
591 let arg_tys = &fn_sig.inputs;
592 let ret_ty = fn_sig.output;
594 debug!("check_fn(arg_tys={:?}, ret_ty={:?}, fn_id={})",
599 // Create the function context. This is either derived from scratch or,
600 // in the case of function expressions, based on the outer context.
603 writeback_errors: Cell::new(false),
604 err_count_on_creation: err_count_on_creation,
606 ps: RefCell::new(UnsafetyState::function(unsafety, unsafety_id)),
611 // Remember return type so that regionck can access it later.
612 let mut fn_sig_tys: Vec<Ty> =
// Only a converging fn has a return type to record; it must be Sized.
617 if let ty::FnConverging(ret_ty) = ret_ty {
618 fcx.require_type_is_sized(ret_ty, decl.output.span(), traits::ReturnType);
619 fn_sig_tys.push(ret_ty); // FIXME(#25759) just take implied bounds from the arguments
622 debug!("fn-sig-map: fn_id={} fn_sig_tys={:?}",
626 inherited.fn_sig_map.borrow_mut().insert(fn_id, fn_sig_tys);
629 let mut visit = GatherLocalsVisitor { fcx: &fcx, };
631 // Add formal parameters.
632 for (arg_ty, input) in arg_tys.iter().zip(&decl.inputs) {
633 // The type of the argument must be well-formed.
635 // NB -- this is now checked in wfcheck, but that
636 // currently only results in warnings, so we issue an
637 // old-style WF obligation here so that we still get the
638 // errors that we used to get.
639 fcx.register_old_wf_obligation(arg_ty, input.ty.span, traits::MiscObligation);
641 // Create type variables for each argument.
642 pat_util::pat_bindings(
645 |_bm, pat_id, sp, _path| {
646 let var_ty = visit.assign(sp, pat_id, None);
647 fcx.require_type_is_sized(var_ty, sp,
648 traits::VariableType(pat_id));
651 // Check the pattern.
654 map: pat_id_map(&tcx.def_map, &*input.pat),
656 _match::check_pat(&pcx, &*input.pat, *arg_ty);
// Gather the remaining locals declared in the body itself.
659 visit.visit_block(body);
// The body is checked against the declared return type (or with no
// expectation if the fn diverges).
662 check_block_with_expected(&fcx, body, match ret_ty {
663 ty::FnConverging(result_type) => ExpectHasType(result_type),
664 ty::FnDiverging => NoExpectation
// Record the final argument types on the argument nodes.
667 for (input, arg) in decl.inputs.iter().zip(arg_tys) {
668 fcx.write_ty(input.id, arg);
// Checks struct-level properties: representability (no infinitely sized
// recursion) and, for #[simd] structs, the SIMD-specific constraints.
674 pub fn check_struct(ccx: &CrateCtxt, id: ast::NodeId, span: Span) {
677 check_representable(tcx, span, id, "struct");
679 if tcx.lookup_simd(DefId::local(id)) {
680 check_simd(tcx, span, id);
// Checks the type-level properties of a single item, dispatching on its
// kind; fn bodies are deliberately left to `check_item_body`.
684 pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
685 debug!("check_item_type(it.id={}, it.ident={})",
687 ccx.tcx.item_path_str(DefId::local(it.id)));
688 let _indenter = indenter();
690 // Consts can play a role in type-checking, so they are included here.
691 hir::ItemStatic(_, _, ref e) |
692 hir::ItemConst(_, ref e) => check_const(ccx, it.span, &**e, it.id),
693 hir::ItemEnum(ref enum_definition, _) => {
694 check_enum_variants(ccx,
696 &enum_definition.variants,
699 hir::ItemFn(..) => {} // entirely within check_item_body
700 hir::ItemImpl(_, _, _, _, _, ref impl_items) => {
701 debug!("ItemImpl {} with id {}", it.ident, it.id);
// A trait impl must be checked against the trait it implements;
// inherent impls have no trait ref and nothing extra to do here.
702 match ccx.tcx.impl_trait_ref(DefId::local(it.id)) {
703 Some(impl_trait_ref) => {
704 check_impl_items_against_trait(ccx,
712 hir::ItemTrait(_, ref generics, _, _) => {
// Validate the #[rustc_on_unimplemented] attribute, if present.
713 check_trait_on_unimplemented(ccx, generics, it);
715 hir::ItemStruct(..) => {
716 check_struct(ccx, it.id, it.span);
718 hir::ItemTy(ref t, ref generics) => {
719 let pty_ty = ccx.tcx.node_id_to_type(it.id);
720 check_bounds_are_used(ccx, t.span, &generics.ty_params, pty_ty);
722 hir::ItemForeignMod(ref m) => {
// Intrinsics get signature checks against their known definitions.
723 if m.abi == abi::RustIntrinsic {
724 for item in &m.items {
725 intrinsic::check_intrinsic_type(ccx, &**item);
727 } else if m.abi == abi::PlatformIntrinsic {
728 for item in &m.items {
729 intrinsic::check_platform_intrinsic_type(ccx, &**item);
// Ordinary foreign items may not be generic (E0044) and variadic
// foreign fns must use the C ABI.
732 for item in &m.items {
733 let pty = ccx.tcx.lookup_item_type(DefId::local(item.id));
734 if !pty.generics.types.is_empty() {
735 span_err!(ccx.tcx.sess, item.span, E0044,
736 "foreign items may not have type parameters");
737 span_help!(ccx.tcx.sess, item.span,
738 "consider using specialization instead of \
742 if let hir::ForeignItemFn(ref fn_decl, _) = item.node {
743 require_c_abi_if_variadic(ccx.tcx, fn_decl, m.abi, item.span);
748 _ => {/* nothing to do */ }
// Type-checks the executable bodies belonging to an item: fn bodies,
// impl/trait method bodies, and associated-const initializer expressions.
752 pub fn check_item_body<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
753 debug!("check_item_body(it.id={}, it.ident={})",
755 ccx.tcx.item_path_str(DefId::local(it.id)));
756 let _indenter = indenter();
758 hir::ItemFn(ref decl, _, _, _, _, ref body) => {
759 let fn_pty = ccx.tcx.lookup_item_type(DefId::local(it.id));
760 let param_env = ParameterEnvironment::for_item(ccx.tcx, it.id);
761 check_bare_fn(ccx, &**decl, &**body, it.id, it.span, fn_pty.ty, param_env);
763 hir::ItemImpl(_, _, _, _, _, ref impl_items) => {
764 debug!("ItemImpl {} with id {}", it.ident, it.id);
766 let impl_pty = ccx.tcx.lookup_item_type(DefId::local(it.id));
768 for impl_item in impl_items {
769 match impl_item.node {
770 hir::ConstImplItem(_, ref expr) => {
771 check_const(ccx, impl_item.span, &*expr, impl_item.id)
773 hir::MethodImplItem(ref sig, ref body) => {
774 check_method_body(ccx, &impl_pty.generics, sig, body,
775 impl_item.id, impl_item.span);
777 hir::TypeImplItem(_) => {
778 // Nothing to do here.
783 hir::ItemTrait(_, _, _, ref trait_items) => {
784 let trait_def = ccx.tcx.lookup_trait_def(DefId::local(it.id));
785 for trait_item in trait_items {
786 match trait_item.node {
787 hir::ConstTraitItem(_, Some(ref expr)) => {
788 check_const(ccx, trait_item.span, &*expr, trait_item.id)
// Provided (defaulted) trait methods have bodies to check;
// required methods only need the const-ness check.
790 hir::MethodTraitItem(ref sig, Some(ref body)) => {
791 check_trait_fn_not_const(ccx, trait_item.span, sig.constness);
793 check_method_body(ccx, &trait_def.generics, sig, body,
794 trait_item.id, trait_item.span);
796 hir::MethodTraitItem(ref sig, None) => {
797 check_trait_fn_not_const(ccx, trait_item.span, sig.constness);
799 hir::ConstTraitItem(_, None) |
800 hir::TypeTraitItem(..) => {
806 _ => {/* nothing to do */ }
// Emits E0379 if a trait fn was declared `const`, which is not allowed.
810 fn check_trait_fn_not_const<'a,'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
812 constness: hir::Constness)
815 hir::Constness::NotConst => {
818 hir::Constness::Const => {
819 span_err!(ccx.tcx.sess, span, E0379, "trait fns cannot be declared const");
// Validates a trait's #[rustc_on_unimplemented] attribute: the message must
// be a string whose format placeholders are `{Self}` or names of the
// trait's own type parameters (E0230/E0231/E0232 otherwise).
824 fn check_trait_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
825 generics: &hir::Generics,
827 if let Some(ref attr) = item.attrs.iter().find(|a| {
828 a.check_name("rustc_on_unimplemented")
830 if let Some(ref istring) = attr.value_str() {
// Parse the message with the format-string parser used by fmt_macros.
831 let parser = Parser::new(&istring);
832 let types = &*generics.ty_params;
833 for token in parser {
835 Piece::String(_) => (), // Normal string, no need to check it
836 Piece::NextArgument(a) => match a.position {
837 // `{Self}` is allowed
838 Position::ArgumentNamed(s) if s == "Self" => (),
839 // So is `{A}` if A is a type parameter
840 Position::ArgumentNamed(s) => match types.iter().find(|t| {
845 span_err!(ccx.tcx.sess, attr.span, E0230,
846 "there is no type parameter \
851 // `{:1}` and `{}` are not to be used
852 Position::ArgumentIs(_) | Position::ArgumentNext => {
853 span_err!(ccx.tcx.sess, attr.span, E0231,
854 "only named substitution \
855 parameters are allowed");
// Attribute present but with no `= "..."` value.
861 span_err!(ccx.tcx.sess, attr.span, E0232,
862 "this attribute must have a value, \
863 eg `#[rustc_on_unimplemented = \"foo\"]`")
868 /// Type checks a method body.
872 /// * `item_generics`: generics defined on the impl/trait that contains
874 /// * `self_bound`: bound for the `Self` type parameter, if any
875 /// * `method`: the method definition
876 fn check_method_body<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
877 item_generics: &ty::Generics<'tcx>,
878 sig: &'tcx hir::MethodSig,
879 body: &'tcx hir::Block,
880 id: ast::NodeId, span: Span) {
881 debug!("check_method_body(item_generics={:?}, id={})",
// Build the parameter environment for this method and delegate to the
// common bare-fn checking path with the method's recorded type.
883 let param_env = ParameterEnvironment::for_item(ccx.tcx, id);
885 let fty = ccx.tcx.node_id_to_type(id);
886 debug!("check_method_body: fty={:?}", fty);
888 check_bare_fn(ccx, &sig.decl, body, id, span, fty, param_env);
// Checks a trait impl's items against the trait definition in two passes:
// (1) every impl item must match a trait item of the same kind and be
//     signature-compatible (E0323/E0324/E0325 on kind mismatch);
// (2) every non-defaulted trait item must be implemented (E0046), and if
//     an associated type with a default was overridden, all defaulted
//     items relying on it must be reimplemented (E0399).
891 fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
893 impl_trait_ref: &ty::TraitRef<'tcx>,
894 impl_items: &[P<hir::ImplItem>]) {
895 // Locate trait methods
897 let trait_items = tcx.trait_items(impl_trait_ref.def_id);
// Set to the impl item that overrides a defaulted associated type, if any.
898 let mut overridden_associated_type = None;
900 // Check existing impl methods to see if they are both present in trait
901 // and compatible with trait signature
902 for impl_item in impl_items {
903 let ty_impl_item = ccx.tcx.impl_or_trait_item(DefId::local(impl_item.id));
904 let ty_trait_item = trait_items.iter()
905 .find(|ac| ac.name() == ty_impl_item.name())
907 // This is checked by resolve
908 tcx.sess.span_bug(impl_item.span,
909 &format!("impl-item `{}` is not a member of `{:?}`",
913 match impl_item.node {
914 hir::ConstImplItem(..) => {
915 let impl_const = match ty_impl_item {
916 ty::ConstTraitItem(ref cti) => cti,
917 _ => tcx.sess.span_bug(impl_item.span, "non-const impl-item for const")
920 // Find associated const definition.
921 if let &ty::ConstTraitItem(ref trait_const) = ty_trait_item {
922 compare_const_impl(ccx.tcx,
928 span_err!(tcx.sess, impl_item.span, E0323,
929 "item `{}` is an associated const, \
930 which doesn't match its trait `{:?}`",
935 hir::MethodImplItem(ref sig, ref body) => {
936 check_trait_fn_not_const(ccx, impl_item.span, sig.constness);
938 let impl_method = match ty_impl_item {
939 ty::MethodTraitItem(ref mti) => mti,
940 _ => tcx.sess.span_bug(impl_item.span, "non-method impl-item for method")
943 if let &ty::MethodTraitItem(ref trait_method) = ty_trait_item {
944 compare_impl_method(ccx.tcx,
951 span_err!(tcx.sess, impl_item.span, E0324,
952 "item `{}` is an associated method, \
953 which doesn't match its trait `{:?}`",
958 hir::TypeImplItem(_) => {
959 let impl_type = match ty_impl_item {
960 ty::TypeTraitItem(ref tti) => tti,
961 _ => tcx.sess.span_bug(impl_item.span, "non-type impl-item for type")
964 if let &ty::TypeTraitItem(ref at) = ty_trait_item {
// Remember overrides of defaulted associated types for pass 2.
965 if let Some(_) = at.ty {
966 overridden_associated_type = Some(impl_item);
969 span_err!(tcx.sess, impl_item.span, E0325,
970 "item `{}` is an associated type, \
971 which doesn't match its trait `{:?}`",
979 // Check for missing items from trait
980 let provided_methods = tcx.provided_trait_methods(impl_trait_ref.def_id);
981 let associated_consts = tcx.associated_consts(impl_trait_ref.def_id);
982 let mut missing_items = Vec::new();
983 let mut invalidated_items = Vec::new();
984 let associated_type_overridden = overridden_associated_type.is_some();
985 for trait_item in trait_items.iter() {
987 ty::ConstTraitItem(ref associated_const) => {
988 let is_implemented = impl_items.iter().any(|ii| {
990 hir::ConstImplItem(..) => {
991 ii.ident.name == associated_const.name
997 associated_consts.iter().any(|ac| ac.default.is_some() &&
998 ac.name == associated_const.name);
1001 missing_items.push(associated_const.name);
1002 } else if associated_type_overridden {
1003 invalidated_items.push(associated_const.name);
1007 ty::MethodTraitItem(ref trait_method) => {
1008 let is_implemented =
1009 impl_items.iter().any(|ii| {
1011 hir::MethodImplItem(..) => {
1012 ii.ident.name == trait_method.name
1018 provided_methods.iter().any(|m| m.name == trait_method.name);
1019 if !is_implemented {
1021 missing_items.push(trait_method.name);
1022 } else if associated_type_overridden {
1023 invalidated_items.push(trait_method.name);
1027 ty::TypeTraitItem(ref associated_type) => {
1028 let is_implemented = impl_items.iter().any(|ii| {
1030 hir::TypeImplItem(_) => {
1031 ii.ident.name == associated_type.name
1036 let is_provided = associated_type.ty.is_some();
1037 if !is_implemented {
1039 missing_items.push(associated_type.name);
1040 } else if associated_type_overridden {
1041 invalidated_items.push(associated_type.name);
// Report all missing and invalidated items in single aggregate errors.
1048 if !missing_items.is_empty() {
1049 span_err!(tcx.sess, impl_span, E0046,
1050 "not all trait items implemented, missing: `{}`",
1051 missing_items.iter()
1052 .map(|name| name.to_string())
1053 .collect::<Vec<_>>().join("`, `"))
1056 if !invalidated_items.is_empty() {
1057 let invalidator = overridden_associated_type.unwrap();
1058 span_err!(tcx.sess, invalidator.span, E0399,
1059 "the following trait items need to be reimplemented \
1060 as `{}` was overridden: `{}`",
1062 invalidated_items.iter()
1063 .map(|name| name.to_string())
1064 .collect::<Vec<_>>().join("`, `"))
// Reports an error for `expr as T` where `T` is unsized, suggesting
// `&T`/`&mut T` or `Box<T>` as appropriate, then records an error type
// for the cast expression so checking can continue.
1068 fn report_cast_to_unsized_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
1075 let tstr = fcx.infcx().ty_to_string(t_cast);
1076 fcx.type_error_message(span, |actual| {
1077 format!("cast to unsized type: `{}` as `{}`", actual, tstr)
// If the written target type was a reference, suggest casting to a
// reference of the appropriate mutability instead.
1080 ty::TyRef(_, ty::TypeAndMut { mutbl: mt, .. }) => {
1081 let mtstr = match mt {
1082 hir::MutMutable => "mut ",
1083 hir::MutImmutable => ""
1085 if t_cast.is_trait() {
// Prefer a machine-applicable suggestion when we can get the
// source snippet; fall back to a plain help note otherwise.
1086 match fcx.tcx().sess.codemap().span_to_snippet(t_span) {
1088 fcx.tcx().sess.span_suggestion(t_span,
1089 "try casting to a reference instead:",
1090 format!("&{}{}", mtstr, s));
1093 span_help!(fcx.tcx().sess, t_span,
1094 "did you mean `&{}{}`?", mtstr, tstr),
1097 span_help!(fcx.tcx().sess, span,
1098 "consider using an implicit coercion to `&{}{}` instead",
1103 match fcx.tcx().sess.codemap().span_to_snippet(t_span) {
1105 fcx.tcx().sess.span_suggestion(t_span,
1106 "try casting to a `Box` instead:",
1107 format!("Box<{}>", s));
1110 span_help!(fcx.tcx().sess, t_span, "did you mean `Box<{}>`?", tstr),
1114 span_help!(fcx.tcx().sess, e_span,
1115 "consider using a box or reference as appropriate");
// Poison the node's type so downstream checks don't cascade.
1118 fcx.write_error(id);
impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> {
    fn tcx(&self) -> &ty::ctxt<'tcx> { self.ccx.tcx }

    // Item types are computed during the collect pass, so a lookup here
    // cannot fail; hence the unconditional `Ok`.
    fn get_item_type_scheme(&self, _: Span, id: DefId)
                            -> Result<ty::TypeScheme<'tcx>, ErrorReported>
        Ok(self.tcx().lookup_item_type(id))

    fn get_trait_def(&self, _: Span, id: DefId)
                     -> Result<&'tcx ty::TraitDef<'tcx>, ErrorReported>
        Ok(self.tcx().lookup_trait_def(id))

    fn ensure_super_predicates(&self, _: Span, _: DefId) -> Result<(), ErrorReported> {
        // All super predicates are ensured during the collect pass.

    fn get_free_substs(&self) -> Option<&Substs<'tcx>> {
        Some(&self.inh.infcx.parameter_environment.free_substs)

    // Collects the in-scope trait bounds applying to the type parameter
    // identified by `node_id`, by filtering the parameter environment
    // for `Trait` predicates whose self type is exactly that parameter.
    fn get_type_parameter_bounds(&self,
                                 node_id: ast::NodeId)
                                 -> Result<Vec<ty::PolyTraitRef<'tcx>>, ErrorReported>
        let def = self.tcx().type_parameter_def(node_id);
        let r = self.inh.infcx.parameter_environment
            .filter_map(|predicate| {
                ty::Predicate::Trait(ref data) => {
                    if data.0.self_ty().is_param(def.space, def.index) {
                        Some(data.to_poly_trait_ref())

    fn trait_defines_associated_type_named(&self,
                                           trait_def_id: DefId,
                                           assoc_name: ast::Name)
        let trait_def = self.ccx.tcx.lookup_trait_def(trait_def_id);
        trait_def.associated_type_names.contains(&assoc_name)

        ty_param_def: Option<ty::TypeParameterDef<'tcx>>,
        substs: Option<&mut subst::Substs<'tcx>>,
        space: Option<subst::ParamSpace>,
        span: Span) -> Ty<'tcx> {
        // Grab the default, doing substitution.
        let default = ty_param_def.and_then(|def| {
            def.default.map(|ty| type_variable::Default {
                ty: ty.subst_spanned(self.tcx(), substs.as_ref().unwrap(), Some(span)),
                def_id: def.default_def_id

        // Fresh type variable carrying the (possibly absent) default, so
        // that default fallback can apply it later if nothing constrains it.
        let ty_var = self.infcx().next_ty_var_with_default(default);

        // Finally we add the type variable to the substs.
        Some(substs) => { substs.types.push(space.unwrap(), ty_var); ty_var }

    // Projects `<T as Trait>::item_name` from a poly trait ref after
    // replacing its late-bound regions with fresh inference variables.
    fn projected_ty_from_poly_trait_ref(&self,
                                        poly_trait_ref: ty::PolyTraitRef<'tcx>,
                                        item_name: ast::Name)
        let (trait_ref, _) =
            self.infcx().replace_late_bound_regions_with_fresh_var(
                infer::LateBoundRegionConversionTime::AssocTypeProjection(item_name),

        self.normalize_associated_type(span, trait_ref, item_name)

    fn projected_ty(&self,
                    trait_ref: ty::TraitRef<'tcx>,
                    item_name: ast::Name)
        self.normalize_associated_type(span, trait_ref, item_name)
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
    /// Shortcut for the type context owned by the crate context.
    fn tcx(&self) -> &ty::ctxt<'tcx> { self.ccx.tcx }

    /// The inference context for this function body.
    pub fn infcx(&self) -> &infer::InferCtxt<'a,'tcx> {

    /// The parameter environment, taken from the inference context.
    pub fn param_env(&self) -> &ty::ParameterEnvironment<'a,'tcx> {
        &self.inh.infcx.parameter_environment

    pub fn sess(&self) -> &Session {

    /// Number of errors reported on the session since this `FnCtxt` was
    /// created (used to tell genuine ICEs from downstream symptoms of
    /// already-reported errors).
    pub fn err_count_since_creation(&self) -> usize {
        self.ccx.tcx.sess.err_count() - self.err_count_on_creation

    /// Resolves type variables in `ty` if possible. Unlike the infcx
    /// version, this version will also select obligations if it seems
    /// useful, in an effort to get more type information.
    fn resolve_type_vars_if_possible(&self, mut ty: Ty<'tcx>) -> Ty<'tcx> {
        debug!("resolve_type_vars_if_possible(ty={:?})", ty);

        // No TyInfer()? Nothing needs doing.
        if !ty.has_infer_types() {
            debug!("resolve_type_vars_if_possible: ty={:?}", ty);

        // If `ty` is a type variable, see whether we already know what it is.
        ty = self.infcx().resolve_type_vars_if_possible(&ty);
        if !ty.has_infer_types() {
            debug!("resolve_type_vars_if_possible: ty={:?}", ty);

        // If not, try resolving any new fcx obligations that have cropped up.
        self.select_new_obligations();
        ty = self.infcx().resolve_type_vars_if_possible(&ty);
        if !ty.has_infer_types() {
            debug!("resolve_type_vars_if_possible: ty={:?}", ty);

        // If not, try resolving *all* pending obligations as much as
        // possible. This can help substantially when there are
        // indirect dependencies that don't seem worth tracking
        self.select_obligations_where_possible();
        ty = self.infcx().resolve_type_vars_if_possible(&ty);

        debug!("resolve_type_vars_if_possible: ty={:?}", ty);

    /// Queues `r` to be run later for the closure `closure_def_id`,
    /// once more type information is available.
    fn record_deferred_call_resolution(&self,
                                       closure_def_id: DefId,
                                       r: DeferredCallResolutionHandler<'tcx>) {
        let mut deferred_call_resolutions = self.inh.deferred_call_resolutions.borrow_mut();
        deferred_call_resolutions.entry(closure_def_id).or_insert(vec![]).push(r);

    /// Removes and returns all handlers queued for `closure_def_id`
    /// (empty vec if none were recorded).
    fn remove_deferred_call_resolutions(&self,
                                        closure_def_id: DefId)
                                        -> Vec<DeferredCallResolutionHandler<'tcx>>
        let mut deferred_call_resolutions = self.inh.deferred_call_resolutions.borrow_mut();
        deferred_call_resolutions.remove(&closure_def_id).unwrap_or(Vec::new())

    /// A debugging tag: the address of this `FnCtxt`, formatted.
    pub fn tag(&self) -> String {
        let self_ptr: *const FnCtxt = self;
        format!("{:?}", self_ptr)

    /// Looks up the type recorded for local `nid`; reports an error and
    /// returns the error type if no type was recorded.
    pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> Ty<'tcx> {
        match self.inh.locals.borrow().get(&nid) {
            self.tcx().sess.span_err(
                &format!("no type for local variable {}", nid));
            self.tcx().types.err

    /// Records `ty` as the type of AST node `node_id` in the side tables.
    pub fn write_ty(&self, node_id: ast::NodeId, ty: Ty<'tcx>) {
        debug!("write_ty({}, {:?}) in fcx {}",
               node_id, ty, self.tag());
        self.inh.tables.borrow_mut().node_types.insert(node_id, ty);

    /// Records item substitutions for `node_id`; no-op substitutions
    /// are not worth storing and are filtered out up front.
    pub fn write_substs(&self, node_id: ast::NodeId, substs: ty::ItemSubsts<'tcx>) {
        if !substs.substs.is_noop() {
            debug!("write_substs({}, {:?}) in fcx {}",
            self.inh.tables.borrow_mut().item_substs.insert(node_id, substs);

    // Convenience wrapper over `write_adjustment` for a plain autoderef
    // chain with no autoref.
    pub fn write_autoderef_adjustment(&self,
                                      node_id: ast::NodeId,
        self.write_adjustment(
            adjustment::AdjustDerefRef(adjustment::AutoDerefRef {

    pub fn write_adjustment(&self,
                            node_id: ast::NodeId,
                            adj: adjustment::AutoAdjustment<'tcx>) {
        debug!("write_adjustment(node_id={}, adj={:?})", node_id, adj);

        // Identity adjustments carry no information worth recording.
        if adj.is_identity() {

        self.inh.tables.borrow_mut().adjustments.insert(node_id, adj);

    /// Basically whenever we are converting from a type scheme into
    /// the fn body space, we always want to normalize associated
    /// types as well. This function combines the two.
    fn instantiate_type_scheme<T>(&self,
                                  substs: &Substs<'tcx>,
        where T : TypeFoldable<'tcx> + HasTypeFlags
        let value = value.subst(self.tcx(), substs);
        let result = self.normalize_associated_types_in(span, &value);
        debug!("instantiate_type_scheme(value={:?}, substs={:?}) = {:?}",

    /// As `instantiate_type_scheme`, but for the bounds found in a
    /// generic type scheme.
    fn instantiate_bounds(&self,
                          substs: &Substs<'tcx>,
                          bounds: &ty::GenericPredicates<'tcx>)
                          -> ty::InstantiatedPredicates<'tcx>
        ty::InstantiatedPredicates {
            predicates: self.instantiate_type_scheme(span, substs, &bounds.predicates)

    /// Normalizes associated types in `value`, relative to this fn body.
    fn normalize_associated_types_in<T>(&self, span: Span, value: &T) -> T
        where T : TypeFoldable<'tcx> + HasTypeFlags
        self.inh.normalize_associated_types_in(span, self.body_id, value)

    /// Normalizes the projection `<trait_ref>::item_name`, registering
    /// any obligations that arise with the fulfillment context.
    fn normalize_associated_type(&self,
                                 trait_ref: ty::TraitRef<'tcx>,
                                 item_name: ast::Name)
        let cause = traits::ObligationCause::new(span,
                                                 traits::ObligationCauseCode::MiscObligation);
            .normalize_projection_type(self.infcx(),
                                       trait_ref: trait_ref,
                                       item_name: item_name,

    /// Instantiates the type in `did` with the generics in `path` and returns
    /// it (registering the necessary trait obligations along the way).
    ///
    /// Note that this function is only intended to be used with type-paths,
    /// not with value-paths.
    pub fn instantiate_type(&self,
        debug!("instantiate_type(did={:?}, path={:?})", did, path);
            self.tcx().lookup_item_type(did);
        let type_predicates =
            self.tcx().lookup_predicates(did);
        let substs = astconv::ast_path_substs_for_ty(self, self,
                                                     PathParamMode::Optional,
                                                     &type_scheme.generics,
                                                     path.segments.last().unwrap());
        debug!("instantiate_type: ty={:?} substs={:?}", &type_scheme.ty, &substs);
            self.instantiate_bounds(path.span, &substs, &type_predicates);
        self.add_obligations_for_parameters(
            traits::ObligationCause::new(
                traits::ItemObligation(did)),

        self.instantiate_type_scheme(path.span, &substs, &type_scheme.ty)

    /// Return the dict-like variant corresponding to a given `Def`.
    pub fn def_struct_variant(&self,
                              -> Option<(ty::AdtDef<'tcx>, ty::VariantDef<'tcx>)>
        let (adt, variant) = match def {
            def::DefVariant(enum_id, variant_id, true) => {
                let adt = self.tcx().lookup_adt_def(enum_id);
                (adt, adt.variant_with_id(variant_id))
            def::DefTy(did, _) | def::DefStruct(did) => {
                let typ = self.tcx().lookup_item_type(did);
                if let ty::TyStruct(adt, _) = typ.ty.sty {
                    (adt, adt.struct_variant())

        // Only struct-like ("dict") variants qualify.
        if let ty::VariantKind::Dict = variant.kind() {
            Some((adt, variant))

    pub fn write_nil(&self, node_id: ast::NodeId) {
        self.write_ty(node_id, self.tcx().mk_nil());
    pub fn write_error(&self, node_id: ast::NodeId) {
        self.write_ty(node_id, self.tcx().types.err);

    /// Registers an obligation that `ty` satisfies the builtin `bound`,
    /// attributed to `span`/`code` for error reporting.
    pub fn require_type_meets(&self,
                              code: traits::ObligationCauseCode<'tcx>,
                              bound: ty::BuiltinBound)
        self.register_builtin_bound(
            traits::ObligationCause::new(span, self.body_id, code));

    pub fn require_type_is_sized(&self,
                                 code: traits::ObligationCauseCode<'tcx>)
        self.require_type_meets(ty, span, code, ty::BoundSized);

    pub fn require_expr_have_sized_type(&self,
                                        code: traits::ObligationCauseCode<'tcx>)
        self.require_type_is_sized(self.expr_ty(expr), expr.span, code);

    pub fn type_is_known_to_be_sized(&self,
        traits::type_known_to_meet_builtin_bound(self.infcx(),

    pub fn register_builtin_bound(&self,
                                  builtin_bound: ty::BuiltinBound,
                                  cause: traits::ObligationCause<'tcx>)
        self.inh.infcx.fulfillment_cx.borrow_mut()
            .register_builtin_bound(self.infcx(), ty, builtin_bound, cause);

    pub fn register_predicate(&self,
                              obligation: traits::PredicateObligation<'tcx>)
        debug!("register_predicate({:?})",
        self.inh.infcx.fulfillment_cx
            .register_predicate_obligation(self.infcx(), obligation);

    /// Converts an AST type to a `Ty`, registering a WF obligation for it
    /// (except for `_`, see the comment below).
    pub fn to_ty(&self, ast_t: &hir::Ty) -> Ty<'tcx> {
        let t = ast_ty_to_ty(self, self, ast_t);

        // Generally speaking, we must check that types entered by the
        // user are well-formed. This is not true for `_`, since those
        // types are generated by inference. Now, you might think that
        // we could as well generate a WF obligation -- but
        // unfortunately that breaks code like `foo as *const _`,
        // because those type variables wind up being unconstrained
        // until very late. Nasty. Probably it'd be best to refactor
        // that code path, but that's tricky because of
        _ => { self.register_wf_obligation(t, ast_t.span, traits::MiscObligation); }

    /// The recorded type of expression `ex`; ICEs if none was recorded.
    pub fn expr_ty(&self, ex: &hir::Expr) -> Ty<'tcx> {
        match self.inh.tables.borrow().node_types.get(&ex.id) {
            self.tcx().sess.bug(&format!("no type for expr in fcx {}",

    /// Apply `adjustment` to the type of `expr`
    pub fn adjust_expr_ty(&self,
                          adjustment: Option<&adjustment::AutoAdjustment<'tcx>>)
        let raw_ty = self.expr_ty(expr);
        let raw_ty = self.infcx().shallow_resolve(raw_ty);
        let resolve_ty = |ty: Ty<'tcx>| self.infcx().resolve_type_vars_if_possible(&ty);
        raw_ty.adjust(self.tcx(), expr.span, expr.id, adjustment, |method_call| {
            self.inh.tables.borrow().method_map.get(&method_call)
                .map(|method| resolve_ty(method.ty))

    pub fn node_ty(&self, id: ast::NodeId) -> Ty<'tcx> {
        match self.inh.tables.borrow().node_types.get(&id) {
            // If errors were already reported, a missing node type is an
            // expected symptom rather than an ICE-worthy bug.
            None if self.err_count_since_creation() != 0 => self.tcx().types.err,
                self.tcx().sess.bug(
                    &format!("no type for node {}: {} in fcx {}",
                             id, self.tcx().map.node_to_string(id),

    pub fn item_substs(&self) -> Ref<NodeMap<ty::ItemSubsts<'tcx>>> {
        // NOTE: @jroesch this is a hack that appears to be fixed on nightly, will monitor if
        // it changes when we upgrade the snapshot compiler
        fn project_item_susbts<'a, 'tcx>(tables: &'a ty::Tables<'tcx>)
                                         -> &'a NodeMap<ty::ItemSubsts<'tcx>> {

        Ref::map(self.inh.tables.borrow(), project_item_susbts)

    /// Invokes `f` with the item substs recorded for `id`, if any.
    pub fn opt_node_ty_substs<F>(&self,
        F: FnOnce(&ty::ItemSubsts<'tcx>),
        match self.inh.tables.borrow().item_substs.get(&id) {

    /// Delegates to `infer::mk_subty` with this fcx's inference context.
    pub fn mk_subty(&self,
                    a_is_expected: bool,
                    origin: infer::TypeOrigin,
                    -> Result<(), TypeError<'tcx>> {
        infer::mk_subty(self.infcx(), a_is_expected, origin, sub, sup)

    /// Delegates to `infer::mk_eqty` with this fcx's inference context.
    pub fn mk_eqty(&self,
                   a_is_expected: bool,
                   origin: infer::TypeOrigin,
                   -> Result<(), TypeError<'tcx>> {
        infer::mk_eqty(self.infcx(), a_is_expected, origin, sub, sup)

    /// Delegates to `infer::mk_subr` with this fcx's inference context.
    pub fn mk_subr(&self,
                   origin: infer::SubregionOrigin<'tcx>,
        infer::mk_subr(self.infcx(), origin, sub, sup)

    pub fn type_error_message<M>(&self,
                                 actual_ty: Ty<'tcx>,
                                 err: Option<&TypeError<'tcx>>) where
        M: FnOnce(String) -> String,
        self.infcx().type_error_message(sp, mk_msg, actual_ty, err);

    pub fn report_mismatched_types(&self,
                                   err: &TypeError<'tcx>) {
        self.infcx().report_mismatched_types(sp, e, a, err)

    /// Registers an obligation for checking later, during regionck, that the type `ty` must
    /// outlive the region `r`.
    pub fn register_region_obligation(&self,
                                      cause: traits::ObligationCause<'tcx>)
        let mut fulfillment_cx = self.inh.infcx.fulfillment_cx.borrow_mut();
        fulfillment_cx.register_region_obligation(ty, region, cause);

    /// Registers an obligation for checking later, during regionck, that
    /// the type `ty` is well-formed.
    pub fn register_wf_obligation(&self,
                                  code: traits::ObligationCauseCode<'tcx>)
        // WF obligations never themselves fail, so no real need to give a detailed cause:
        let cause = traits::ObligationCause::new(span, self.body_id, code);
        self.register_predicate(traits::Obligation::new(cause, ty::Predicate::WellFormed(ty)));

    pub fn register_old_wf_obligation(&self,
                                      code: traits::ObligationCauseCode<'tcx>)
        // Registers an "old-style" WF obligation that uses the
        // implicator code. This is basically a buggy version of
        // `register_wf_obligation` that is being kept around
        // temporarily just to help with phasing in the newer rules.

        // FIXME(#27579) all uses of this should be migrated to register_wf_obligation eventually
        let cause = traits::ObligationCause::new(span, self.body_id, code);
        self.register_region_obligation(ty, ty::ReEmpty, cause);

    /// Registers obligations that all types appearing in `substs` are well-formed.
    pub fn add_wf_bounds(&self, substs: &Substs<'tcx>, expr: &hir::Expr)
        for &ty in &substs.types {
            self.register_wf_obligation(ty, expr.span, traits::MiscObligation);

    /// Given a fully substituted set of bounds (`generic_bounds`), and the values with which each
    /// type/region parameter was instantiated (`substs`), creates and registers suitable
    /// trait/region obligations.
    ///
    /// For example, if there is a function:
    ///
    ///     fn foo<'a,T:'a>(...)
    ///
    /// and a reference:
    ///
    /// Then we will create a fresh region variable `'$0` and a fresh type variable `$1` for `'a`
    /// and `T`. This routine will add a region obligation `$1:'$0` and register it locally.
    pub fn add_obligations_for_parameters(&self,
                                          cause: traits::ObligationCause<'tcx>,
                                          predicates: &ty::InstantiatedPredicates<'tcx>)
        assert!(!predicates.has_escaping_regions());

        debug!("add_obligations_for_parameters(predicates={:?})",

        for obligation in traits::predicates_for_generics(cause, predicates) {
            self.register_predicate(obligation);

    // FIXME(arielb1): use this instead of field.ty everywhere
    /// Returns the type of `field` under `substs`, with associated types
    /// normalized.
    pub fn field_ty(&self,
                    field: ty::FieldDef<'tcx>,
                    substs: &Substs<'tcx>)
        self.normalize_associated_types_in(span,
                                           &field.ty(self.tcx(), substs))

    // Only for fields! Returns <none> for methods.
    // Indifferent to privacy flags

    /// Runs the cast checks that were deferred until type information
    /// became available, draining the deferred list.
    fn check_casts(&self) {
        let mut deferred_cast_checks = self.inh.deferred_cast_checks.borrow_mut();
        for cast in deferred_cast_checks.drain(..) {

    /// Apply "fallbacks" to some types
    /// ! gets replaced with (), unconstrained ints with i32, and unconstrained floats with f64.
    fn default_type_parameters(&self) {
        use middle::ty::error::UnconstrainedNumeric::Neither;
        use middle::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
        for ty in &self.infcx().unsolved_variables() {
            let resolved = self.infcx().resolve_type_vars_if_possible(ty);
            if self.infcx().type_var_diverges(resolved) {
                demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
                match self.infcx().type_is_unconstrained_numeric(resolved) {
                    UnconstrainedInt => {
                        demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32)
                    UnconstrainedFloat => {
                        demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64)

    // Chooses between the feature-gated "new" default fallback algorithm
    // and the old one.
    fn select_all_obligations_and_apply_defaults(&self) {
        if self.tcx().sess.features.borrow().default_type_parameter_fallback {
            self.new_select_all_obligations_and_apply_defaults();
            self.old_select_all_obligations_and_apply_defaults();

    // Implements old type inference fallback algorithm
    fn old_select_all_obligations_and_apply_defaults(&self) {
        self.select_obligations_where_possible();
        self.default_type_parameters();
        self.select_obligations_where_possible();

    fn new_select_all_obligations_and_apply_defaults(&self) {
        use middle::ty::error::UnconstrainedNumeric::Neither;
        use middle::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};

        // For the time being this errs on the side of being memory wasteful but provides better
        // let type_variables = self.infcx().type_variables.clone();

        // There is a possibility that this algorithm will have to run an arbitrary number of times
        // to terminate so we bound it by the compiler's recursion limit.
        for _ in (0..self.tcx().sess.recursion_limit.get()) {
            // First we try to solve all obligations, it is possible that the last iteration
            // has made it possible to make more progress.
            self.select_obligations_where_possible();

            let mut conflicts = Vec::new();

            // Collect all unsolved type, integral and floating point variables.
            let unsolved_variables = self.inh.infcx.unsolved_variables();

            // We must collect the defaults *before* we do any unification. Because we have
            // directly attached defaults to the type variables any unification that occurs
            // will erase defaults causing conflicting defaults to be completely ignored.
            let default_map: FnvHashMap<_, _> =
                .filter_map(|t| self.infcx().default(t).map(|d| (t, d)))

            let mut unbound_tyvars = HashSet::new();

            debug!("select_all_obligations_and_apply_defaults: defaults={:?}", default_map);

            // We loop over the unsolved variables, resolving them and if they are
            // an unconstrained numeric type we add them to the set of unbound
            // variables. We do this so we only apply literal fallback to type
            // variables without defaults.
            for ty in &unsolved_variables {
                let resolved = self.infcx().resolve_type_vars_if_possible(ty);
                if self.infcx().type_var_diverges(resolved) {
                    demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
                    match self.infcx().type_is_unconstrained_numeric(resolved) {
                        UnconstrainedInt | UnconstrainedFloat => {
                            unbound_tyvars.insert(resolved);

            // We now remove any numeric types that also have defaults, and instead insert
            // the type variable with a defined fallback.
            for ty in &unsolved_variables {
                if let Some(_default) = default_map.get(ty) {
                    let resolved = self.infcx().resolve_type_vars_if_possible(ty);

                    debug!("select_all_obligations_and_apply_defaults: ty: {:?} with default: {:?}",

                    match resolved.sty {
                        ty::TyInfer(ty::TyVar(_)) => {
                            unbound_tyvars.insert(ty);

                        ty::TyInfer(ty::IntVar(_)) | ty::TyInfer(ty::FloatVar(_)) => {
                            unbound_tyvars.insert(ty);
                            if unbound_tyvars.contains(resolved) {
                                unbound_tyvars.remove(resolved);

            // If there are no more fallbacks to apply at this point we have applied all possible
            // defaults and type inference will proceed as normal.
            if unbound_tyvars.is_empty() {

            // Finally we go through each of the unbound type variables and unify them with
            // the proper fallback, reporting a conflicting default error if any of the
            // unifications fail. We know it must be a conflicting default because the
            // variable would only be in `unbound_tyvars` and have a concrete value if
            // it had been solved by previously applying a default.

            // We wrap this in a transaction for error reporting, if we detect a conflict
            // we will rollback the inference context to its prior state so we can probe
            // for conflicts and correctly report them.
            let _ = self.infcx().commit_if_ok(|_: &infer::CombinedSnapshot| {
                for ty in &unbound_tyvars {
                    if self.infcx().type_var_diverges(ty) {
                        demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
                        match self.infcx().type_is_unconstrained_numeric(ty) {
                            UnconstrainedInt => {
                                demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32)
                            UnconstrainedFloat => {
                                demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64)
                                if let Some(default) = default_map.get(ty) {
                                    let default = default.clone();
                                    match infer::mk_eqty(self.infcx(), false,
                                                         infer::Misc(default.origin_span),
                                        conflicts.push((*ty, default));

            // If there are conflicts we rollback, otherwise commit
            if conflicts.len() > 0 {

            if conflicts.len() > 0 {
                // Loop through each conflicting default, figuring out the default that caused
                // a unification failure and then report an error for each.
                for (conflict, default) in conflicts {
                    let conflicting_default =
                        self.find_conflicting_default(&unbound_tyvars, &default_map, conflict)
                            .unwrap_or(type_variable::Default {
                                ty: self.infcx().next_ty_var(),
                                origin_span: codemap::DUMMY_SP,
                                def_id: DefId::local(0) // what do I put here?

                    // This is to ensure that we eliminate any non-determinism from the error
                    // reporting by fixing an order, it doesn't matter what order we choose
                    // just that it is consistent.
                    let (first_default, second_default) =
                        if default.def_id < conflicting_default.def_id {
                            (default, conflicting_default)
                            (conflicting_default, default)

                    self.infcx().report_conflicting_default_types(
                        first_default.origin_span,

        self.select_obligations_where_possible();

    // For use in error handling related to default type parameter fallback. We explicitly
    // apply the default that caused conflict first to a local version of the type variable
    // table then apply defaults until we find a conflict. That default must be the one
    // that caused conflict earlier.
    fn find_conflicting_default(&self,
                                unbound_vars: &HashSet<Ty<'tcx>>,
                                default_map: &FnvHashMap<&Ty<'tcx>, type_variable::Default<'tcx>>,
                                -> Option<type_variable::Default<'tcx>> {
        use middle::ty::error::UnconstrainedNumeric::Neither;
        use middle::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};

        // Ensure that we apply the conflicting default first
        let mut unbound_tyvars = Vec::with_capacity(unbound_vars.len() + 1);
        unbound_tyvars.push(conflict);
        unbound_tyvars.extend(unbound_vars.iter());

        let mut result = None;
        // We run the same code as above applying defaults in order, this time when
        // we find the conflict we just return it for error reporting above.

        // We also run this inside snapshot that never commits so we can do error
        // reporting for more than one conflict.
        for ty in &unbound_tyvars {
            if self.infcx().type_var_diverges(ty) {
                demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
                match self.infcx().type_is_unconstrained_numeric(ty) {
                    UnconstrainedInt => {
                        demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32)
                    UnconstrainedFloat => {
                        demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64)
                        if let Some(default) = default_map.get(ty) {
                            let default = default.clone();
                            match infer::mk_eqty(self.infcx(), false,
                                                 infer::Misc(default.origin_span),
                                    result = Some(default);

    fn select_all_obligations_or_error(&self) {
        debug!("select_all_obligations_or_error");

        // upvar inference should have ensured that all deferred call
        // resolutions are handled by now.
        assert!(self.inh.deferred_call_resolutions.borrow().is_empty());

        self.select_all_obligations_and_apply_defaults();

        let mut fulfillment_cx = self.inh.infcx.fulfillment_cx.borrow_mut();
        match fulfillment_cx.select_all_or_error(self.infcx()) {
            Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }

    /// Select as many obligations as we can at present.
    fn select_obligations_where_possible(&self) {
        self.inh.infcx.fulfillment_cx
            .select_where_possible(self.infcx())
            Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }

    /// Try to select any fcx obligation that we haven't tried yet, in an effort
    /// to improve inference. You could just call
    /// `select_obligations_where_possible` except that it leads to repeated
    fn select_new_obligations(&self) {
        self.inh.infcx.fulfillment_cx
            .select_new_obligations(self.infcx())
            Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }
impl<'a, 'tcx> RegionScope for FnCtxt<'a, 'tcx> {
    fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
        Some(self.base_object_lifetime_default(span))

    fn base_object_lifetime_default(&self, span: Span) -> ty::Region {
        // RFC #599 specifies that object lifetime defaults take
        // precedence over other defaults. But within a fn body we
        // don't have a *default* region, rather we use inference to
        // find the *correct* region, which is strictly more general
        // (and anyway, within a fn body the right region may not even
        // be something the user can write explicitly, since it might
        // be some expression).
        self.infcx().next_region_var(infer::MiscVariable(span))

    fn anon_regions(&self, span: Span, count: usize)
                    -> Result<Vec<ty::Region>, Option<Vec<ElisionFailureInfo>>> {
        // Within a fn body, every elided region is just a fresh
        // inference variable; elision can never fail here.
        Ok((0..count).map(|_| {
            self.infcx().next_region_var(infer::MiscVariable(span))
/// Whether `autoderef` requires the types it encounters to resolve:
/// either error out on an unresolved type, or carry on and return it.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum UnresolvedTypeAction {
    /// Produce an error and return `TyError` whenever a type cannot
    /// be resolved (i.e. it is `TyInfer`).
    /// Go on without emitting any errors, and return the unresolved
    /// type. Useful for probing, e.g. in coercions.
/// Executes an autoderef loop for the type `t`. At each step, invokes `should_stop` to decide
/// whether to terminate the loop. Returns the final type and number of derefs that it performed.
///
/// Note: this method does not modify the adjustments table. The caller is responsible for
/// inserting an AutoAdjustment record into the `fcx` using one of the suitable methods.
pub fn autoderef<'a, 'tcx, T, F>(fcx: &FnCtxt<'a, 'tcx>,
                                 opt_expr: Option<&hir::Expr>,
                                 unresolved_type_action: UnresolvedTypeAction,
                                 mut lvalue_pref: LvaluePreference,
                                 -> (Ty<'tcx>, usize, Option<T>)
    where F: FnMut(Ty<'tcx>, usize) -> Option<T>,
    debug!("autoderef(base_ty={:?}, opt_expr={:?}, lvalue_pref={:?})",

    let mut t = base_ty;
    // Bound the deref chain by the recursion limit so a cyclic `Deref`
    // impl cannot loop forever (E0055 reported below on overflow).
    for autoderefs in 0..fcx.tcx().sess.recursion_limit.get() {
        let resolved_t = match unresolved_type_action {
            UnresolvedTypeAction::Error => {
                structurally_resolved_type(fcx, sp, t)
            UnresolvedTypeAction::Ignore => {
                // We can continue even when the type cannot be resolved
                // (i.e. it is an inference variable) because `Ty::builtin_deref`
                // and `try_overloaded_deref` both simply return `None`
                // in such a case without producing spurious errors.
                fcx.resolve_type_vars_if_possible(t)

        if resolved_t.references_error() {
            return (resolved_t, autoderefs, None);

        match should_stop(resolved_t, autoderefs) {
            Some(x) => return (resolved_t, autoderefs, Some(x)),

        // Otherwise, deref if type is derefable:
        let mt = match resolved_t.builtin_deref(false, lvalue_pref) {
            Some(mt) => Some(mt),
                opt_expr.map(|expr| MethodCall::autoderef(expr.id, autoderefs as u32));

                // Super subtle: it might seem as though we should
                // pass `opt_expr` to `try_overloaded_deref`, so that
                // the (implicit) autoref of using an overloaded deref
                // would get added to the adjustment table. However we
                // do not do that, because it's kind of a
                // "meta-adjustment" -- instead, we just leave it
                // unrecorded and know that there "will be" an
                // autoref. regionck and other bits of the code base,
                // when they encounter an overloaded autoderef, have
                // to do some reconstructive surgery. This is a pretty
                // complex mess that is begging for a proper MIR.
                try_overloaded_deref(fcx, sp, method_call, None, resolved_t, lvalue_pref)

        // Once we step through an immutable deref, stop preferring
        // mutable lvalues for the rest of the chain.
        if mt.mutbl == hir::MutImmutable {
            lvalue_pref = NoPreference;
            None => return (resolved_t, autoderefs, None)

    // We've reached the recursion limit, error gracefully.
    span_err!(fcx.tcx().sess, sp, E0055,
              "reached the recursion limit while auto-dereferencing {:?}",
    (fcx.tcx().types.err, 0, None)
/// Looks up an overloaded deref impl for the base type, trying
/// `DerefMut` first when a mutable lvalue is preferred and falling
/// back to `Deref`, then peels the method's `&T` return down to `T`
/// via `make_overloaded_lvalue_return_type`.
fn try_overloaded_deref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
                                  method_call: Option<MethodCall>,
                                  base_expr: Option<&hir::Expr>,
                                  lvalue_pref: LvaluePreference)
                                  -> Option<ty::TypeAndMut<'tcx>>
    // Try DerefMut first, if preferred.
    let method = match (lvalue_pref, fcx.tcx().lang_items.deref_mut_trait()) {
        (PreferMutLvalue, Some(trait_did)) => {
            method::lookup_in_trait(fcx, span, base_expr,
                                    token::intern("deref_mut"), trait_did,

    // Otherwise, fall back to Deref.
    let method = match (method, fcx.tcx().lang_items.deref_trait()) {
        (None, Some(trait_did)) => {
            method::lookup_in_trait(fcx, span, base_expr,
                                    token::intern("deref"), trait_did,
        (method, _) => method

    make_overloaded_lvalue_return_type(fcx, method_call, method)
/// For the overloaded lvalue expressions (`*x`, `x[3]`), the trait returns a type of `&T`, but the
/// actual type we assign to the *expression* is `T`. So this function just peels off the return
/// type by one layer to yield `T`. It also inserts the `method-callee` into the method map.
fn make_overloaded_lvalue_return_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
                                                method_call: Option<MethodCall>,
                                                method: Option<MethodCallee<'tcx>>)
                                                -> Option<ty::TypeAndMut<'tcx>>
            // extract the method's return type, which will be &T;
            // all LB regions should have been instantiated during method lookup
            let ret_ty = method.ty.fn_ret();
            let ret_ty = fcx.tcx().no_late_bound_regions(&ret_ty).unwrap().unwrap();

            if let Some(method_call) = method_call {
                fcx.inh.tables.borrow_mut().method_map.insert(method_call, method);

            // method returns &T, but the type as visible to user is T, so deref
            ret_ty.builtin_deref(true, NoPreference)
/// Type-checks `base_expr[idx]`: autoderefs `base_expr`, trying
/// `try_index_step` at each level; if the fully-derefed type is an
/// array `[T; n]`, makes one last attempt after an unsized coercion
/// to the slice type `[T]`.
fn lookup_indexing<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
                             base_expr: &'tcx hir::Expr,
                             lvalue_pref: LvaluePreference)
                             -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
    // FIXME(#18741) -- this is almost but not quite the same as the
    // autoderef that normal method probing does. They could likely be
    let (ty, autoderefs, final_mt) = autoderef(fcx,
                                               UnresolvedTypeAction::Error,
        try_index_step(fcx, MethodCall::expr(expr.id), expr, base_expr,
                       adj_ty, idx, false, lvalue_pref, idx_ty)

    if final_mt.is_some() {

    // After we have fully autoderef'd, if the resulting type is [T; n], then
    // do a final unsized coercion to yield [T].
    if let ty::TyArray(element_ty, _) = ty.sty {
        let adjusted_ty = fcx.tcx().mk_slice(element_ty);
        try_index_step(fcx, MethodCall::expr(expr.id), expr, base_expr,
                       adjusted_ty, autoderefs, true, lvalue_pref, idx_ty)
2283 /// To type-check `base_expr[index_expr]`, we progressively autoderef (and otherwise adjust)
2284 /// `base_expr`, looking for a type which either supports builtin indexing or overloaded indexing.
2285 /// This loop implements one step in that search; the autoderef loop is implemented by
2286 /// `lookup_indexing`.
2287 fn try_index_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2288 method_call: MethodCall,
2290 base_expr: &'tcx hir::Expr,
2291 adjusted_ty: Ty<'tcx>,
2294 lvalue_pref: LvaluePreference,
2296 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2298 let tcx = fcx.tcx();
2299 debug!("try_index_step(expr={:?}, base_expr.id={:?}, adjusted_ty={:?}, \
2300 autoderefs={}, unsize={}, index_ty={:?})",
// Fresh inference variable standing in for the index operand's type; it is
// constrained by passing it as the supplied method argument type below.
2308 let input_ty = fcx.infcx().next_ty_var();
2310 // First, try built-in indexing.
2311 match (adjusted_ty.builtin_index(), &index_ty.sty) {
2312 (Some(ty), &ty::TyUint(hir::TyUs)) | (Some(ty), &ty::TyInfer(ty::IntVar(_))) => {
2313 debug!("try_index_step: success, using built-in indexing");
2314 // If we had `[T; N]`, we should've caught it before unsizing to `[T]`.
2316 fcx.write_autoderef_adjustment(base_expr.id, autoderefs);
// Builtin indexing always takes `usize` and yields the element type directly.
2317 return Some((tcx.types.usize, ty));
2322 // Try `IndexMut` first, if preferred.
2323 let method = match (lvalue_pref, tcx.lang_items.index_mut_trait()) {
2324 (PreferMutLvalue, Some(trait_did)) => {
2325 method::lookup_in_trait_adjusted(fcx,
2328 token::intern("index_mut"),
2333 Some(vec![input_ty]))
2338 // Otherwise, fall back to `Index`.
2339 let method = match (method, tcx.lang_items.index_trait()) {
2340 (None, Some(trait_did)) => {
2341 method::lookup_in_trait_adjusted(fcx,
2344 token::intern("index"),
2349 Some(vec![input_ty]))
2351 (method, _) => method,
2354 // If some lookup succeeds, write callee into table and extract index/element
2355 // type from the method signature.
2356 // If some lookup succeeded, install method in table
2357 method.and_then(|method| {
2358 debug!("try_index_step: success, using overloaded indexing");
2359 make_overloaded_lvalue_return_type(fcx, Some(method_call), Some(method)).
2360 map(|ret| (input_ty, ret.ty))
/// Checks the arguments of a method call against the method's signature,
/// skipping the `self` parameter (already checked during method lookup).
/// Returns the method's declared output type, or an error-converging output
/// if the method type itself already contains errors.
2364 fn check_method_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2366 method_fn_ty: Ty<'tcx>,
2367 callee_expr: &'tcx hir::Expr,
2368 args_no_rcvr: &'tcx [P<hir::Expr>],
2369 tuple_arguments: TupleArgumentsFlag,
2370 expected: Expectation<'tcx>)
2371 -> ty::FnOutput<'tcx> {
2372 if method_fn_ty.references_error() {
// Error recovery path: still walk the arguments (so every sub-expression
// gets a type) but check them against error types, then yield `err`.
2373 let err_inputs = err_args(fcx.tcx(), args_no_rcvr.len());
2375 let err_inputs = match tuple_arguments {
2376 DontTupleArguments => err_inputs,
2377 TupleArguments => vec![fcx.tcx().mk_tup(err_inputs)],
2380 check_argument_types(fcx,
2387 ty::FnConverging(fcx.tcx().types.err)
2389 match method_fn_ty.sty {
2390 ty::TyBareFn(_, ref fty) => {
2391 // HACK(eddyb) ignore self in the definition (see above).
// `inputs[1..]` drops the receiver from the formal parameter list.
2392 let expected_arg_tys = expected_types_for_fn_args(fcx,
2396 &fty.sig.0.inputs[1..]);
2397 check_argument_types(fcx,
2399 &fty.sig.0.inputs[1..],
2400 &expected_arg_tys[..],
// A resolved method should always have a bare-fn type; anything else
// indicates a compiler bug, hence `span_bug` rather than a user error.
2407 fcx.tcx().sess.span_bug(callee_expr.span,
2408 "method without bare fn type");
2414 /// Generic function that factors out common logic from function calls, method calls and overloaded
2416 fn check_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2418 fn_inputs: &[Ty<'tcx>],
2419 expected_arg_tys: &[Ty<'tcx>],
2420 args: &'tcx [P<hir::Expr>],
2422 tuple_arguments: TupleArgumentsFlag) {
2423 let tcx = fcx.ccx.tcx;
2425 // Grab the argument types, supplying fresh type variables
2426 // if the wrong number of arguments were supplied
2427 let supplied_arg_count = if tuple_arguments == DontTupleArguments {
2433 // All the input types from the fn signature must outlive the call
2434 // so as to validate implied bounds.
2435 for &fn_input_ty in fn_inputs {
2436 fcx.register_wf_obligation(fn_input_ty, sp, traits::MiscObligation);
// `formal_tys` below is the parameter list actually checked against; on an
// arity mismatch it is replaced with error types (via `err_args`) and the
// expectations are cleared, so checking can continue after the diagnostic.
2439 let mut expected_arg_tys = expected_arg_tys;
2440 let expected_arg_count = fn_inputs.len();
2441 let formal_tys = if tuple_arguments == TupleArguments {
// Overloaded-call case: the single formal parameter must resolve to a
// tuple whose arity matches the call-site argument count (else E0057/E0059).
2442 let tuple_type = structurally_resolved_type(fcx, sp, fn_inputs[0]);
2443 match tuple_type.sty {
2444 ty::TyTuple(ref arg_types) => {
2445 if arg_types.len() != args.len() {
2446 span_err!(tcx.sess, sp, E0057,
2447 "this function takes {} parameter{} but {} parameter{} supplied",
2449 if arg_types.len() == 1 {""} else {"s"},
2451 if args.len() == 1 {" was"} else {"s were"});
2452 expected_arg_tys = &[];
2453 err_args(fcx.tcx(), args.len())
// Only an expected type that is itself a tuple can be split into
// per-argument expectations here.
2455 expected_arg_tys = match expected_arg_tys.get(0) {
2456 Some(&ty) => match ty.sty {
2457 ty::TyTuple(ref tys) => &**tys,
2462 (*arg_types).clone()
2466 span_err!(tcx.sess, sp, E0059,
2467 "cannot use call notation; the first type parameter \
2468 for the function trait is neither a tuple nor unit");
2469 expected_arg_tys = &[];
2470 err_args(fcx.tcx(), args.len())
2473 } else if expected_arg_count == supplied_arg_count {
2475 } else if variadic {
// C-variadic: too few arguments is an error (E0060); extras are allowed.
2476 if supplied_arg_count >= expected_arg_count {
2479 span_err!(tcx.sess, sp, E0060,
2480 "this function takes at least {} parameter{} \
2481 but {} parameter{} supplied",
2483 if expected_arg_count == 1 {""} else {"s"},
2485 if supplied_arg_count == 1 {" was"} else {"s were"});
2486 expected_arg_tys = &[];
2487 err_args(fcx.tcx(), supplied_arg_count)
2490 span_err!(tcx.sess, sp, E0061,
2491 "this function takes {} parameter{} but {} parameter{} supplied",
2493 if expected_arg_count == 1 {""} else {"s"},
2495 if supplied_arg_count == 1 {" was"} else {"s were"});
2496 expected_arg_tys = &[];
2497 err_args(fcx.tcx(), supplied_arg_count)
2500 debug!("check_argument_types: formal_tys={:?}",
2501 formal_tys.iter().map(|t| fcx.infcx().ty_to_string(*t)).collect::<Vec<String>>());
2503 // Check the arguments.
2504 // We do this in a pretty awful way: first we typecheck any arguments
2505 // that are not anonymous functions, then we typecheck the anonymous
2506 // functions. This is so that we have more information about the types
2507 // of arguments when we typecheck the functions. This isn't really the
2508 // right way to do this.
// Two passes: `false` = non-closure arguments first, `true` = closures second.
2509 let xs = [false, true];
2510 for check_blocks in &xs {
2511 let check_blocks = *check_blocks;
2512 debug!("check_blocks={}", check_blocks);
2514 // More awful hacks: before we check argument types, try to do
2515 // an "opportunistic" vtable resolution of any trait bounds on
2516 // the call. This helps coercions.
2518 fcx.select_new_obligations();
2521 // For variadic functions, we don't have a declared type for all of
2522 // the arguments hence we only do our usual type checking with
2523 // the arguments who's types we do know.
2524 let t = if variadic {
2526 } else if tuple_arguments == TupleArguments {
2531 for (i, arg) in args.iter().take(t).enumerate() {
2532 let is_block = match arg.node {
2533 hir::ExprClosure(..) => true,
2537 if is_block == check_blocks {
2538 debug!("checking the argument");
2539 let formal_ty = formal_tys[i];
2541 // The special-cased logic below has three functions:
2542 // 1. Provide as good of an expected type as possible.
2543 let expected = expected_arg_tys.get(i).map(|&ty| {
2544 Expectation::rvalue_hint(fcx.tcx(), ty)
2547 check_expr_with_unifier(fcx,
2549 expected.unwrap_or(ExpectHasType(formal_ty)),
2551 // 2. Coerce to the most detailed type that could be coerced
2552 // to, which is `expected_ty` if `rvalue_hint` returns an
2553 // `ExprHasType(expected_ty)`, or the `formal_ty` otherwise.
2554 let coerce_ty = expected.and_then(|e| e.only_has_type(fcx));
2555 demand::coerce(fcx, arg.span, coerce_ty.unwrap_or(formal_ty), &**arg);
2557 // 3. Relate the expected type and the formal one,
2558 // if the expected type was used for the coercion.
2559 coerce_ty.map(|ty| demand::suptype(fcx, arg.span, formal_ty, ty));
2565 // We also need to make sure we at least write the ty of the other
2566 // arguments which we skipped above.
2568 for arg in args.iter().skip(expected_arg_count) {
2569 check_expr(fcx, &**arg);
2571 // There are a few types which get autopromoted when passed via varargs
2572 // in C but we just error out instead and require explicit casts.
2573 let arg_ty = structurally_resolved_type(fcx, arg.span,
2574 fcx.expr_ty(&**arg));
2576 ty::TyFloat(hir::TyF32) => {
2577 fcx.type_error_message(arg.span,
2579 format!("can't pass an {} to variadic \
2580 function, cast to c_double", t)
2583 ty::TyInt(hir::TyI8) | ty::TyInt(hir::TyI16) | ty::TyBool => {
2584 fcx.type_error_message(arg.span, |t| {
2585 format!("can't pass {} to variadic \
2586 function, cast to c_int",
2590 ty::TyUint(hir::TyU8) | ty::TyUint(hir::TyU16) => {
2591 fcx.type_error_message(arg.span, |t| {
2592 format!("can't pass {} to variadic \
2593 function, cast to c_uint",
2603 // FIXME(#17596) Ty<'tcx> is incorrectly invariant w.r.t 'tcx.
/// Produces a vector of `len` copies of the error type, used so that argument
/// checking can proceed after an arity or resolution error has been reported.
2604 fn err_args<'tcx>(tcx: &ty::ctxt<'tcx>, len: usize) -> Vec<Ty<'tcx>> {
2605 (0..len).map(|_| tcx.types.err).collect()
/// Records the type of a call expression: the callee's return type if it
/// converges, or a fresh diverging type variable if the callee never returns.
2608 fn write_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2609 call_expr: &hir::Expr,
2610 output: ty::FnOutput<'tcx>) {
2611 fcx.write_ty(call_expr.id, match output {
2612 ty::FnConverging(output_ty) => output_ty,
// A diverging call can still be used in any type context, so give it a
// fresh diverging variable rather than a concrete type.
2613 ty::FnDiverging => fcx.infcx().next_diverging_ty_var()
2617 // AST fragment checking
/// Computes the type of a literal expression. Suffixed numeric literals get
/// their declared machine type; unsuffixed ones consult the expected type and
/// otherwise fall back to a fresh int/float inference variable.
2618 fn check_lit<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2620 expected: Expectation<'tcx>)
2623 let tcx = fcx.ccx.tcx;
2626 hir::LitStr(..) => tcx.mk_static_str(),
// Byte-string literal: `&'static [u8; len]`.
2627 hir::LitByteStr(ref v) => {
2628 tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic),
2629 tcx.mk_array(tcx.types.u8, v.len()))
2631 hir::LitByte(_) => tcx.types.u8,
2632 hir::LitChar(_) => tcx.types.char,
2633 hir::LitInt(_, hir::SignedIntLit(t, _)) => tcx.mk_mach_int(t),
2634 hir::LitInt(_, hir::UnsignedIntLit(t)) => tcx.mk_mach_uint(t),
2635 hir::LitInt(_, hir::UnsuffixedIntLit(_)) => {
// Unsuffixed integer: adopt the expected type when it is integral; an
// expected char steers to u8, and raw-pointer/fn-pointer expectations
// steer to usize (these arise from casts of the literal).
2636 let opt_ty = expected.to_option(fcx).and_then(|ty| {
2638 ty::TyInt(_) | ty::TyUint(_) => Some(ty),
2639 ty::TyChar => Some(tcx.types.u8),
2640 ty::TyRawPtr(..) => Some(tcx.types.usize),
2641 ty::TyBareFn(..) => Some(tcx.types.usize),
2645 opt_ty.unwrap_or_else(
2646 || tcx.mk_int_var(fcx.infcx().next_int_var_id()))
2648 hir::LitFloat(_, t) => tcx.mk_mach_float(t),
2649 hir::LitFloatUnsuffixed(_) => {
2650 let opt_ty = expected.to_option(fcx).and_then(|ty| {
2652 ty::TyFloat(_) => Some(ty),
2656 opt_ty.unwrap_or_else(
2657 || tcx.mk_float_var(fcx.infcx().next_float_var_id()))
2659 hir::LitBool(_) => tcx.types.bool
/// Checks `expr`, then demands that `expected` is a supertype of the
/// expression's actual type (a subtyping check, no coercion).
2663 pub fn check_expr_has_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2664 expr: &'tcx hir::Expr,
2665 expected: Ty<'tcx>) {
2666 check_expr_with_unifier(
2667 fcx, expr, ExpectHasType(expected), NoPreference,
2668 || demand::suptype(fcx, expr.span, expected, fcx.expr_ty(expr)));
/// Checks `expr`, then demands that its type be *coercible* to `expected`
/// (weaker than `check_expr_has_type`: coercions such as autoderef/unsizing
/// are permitted by the `demand::coerce` unifier).
2671 fn check_expr_coercable_to_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2672 expr: &'tcx hir::Expr,
2673 expected: Ty<'tcx>) {
2674 check_expr_with_unifier(
2675 fcx, expr, ExpectHasType(expected), NoPreference,
2676 || demand::coerce(fcx, expr.span, expected, expr));
/// Checks `expr` with `expected` used only as an inference hint — no
/// subtyping or coercion obligation is imposed on the final type.
2679 fn check_expr_with_hint<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, expr: &'tcx hir::Expr,
2680 expected: Ty<'tcx>) {
2681 check_expr_with_unifier(
2682 fcx, expr, ExpectHasType(expected), NoPreference,
/// Checks `expr` against a general `Expectation` (which may be
/// `NoExpectation`); no unification is performed afterwards.
2686 fn check_expr_with_expectation<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2687 expr: &'tcx hir::Expr,
2688 expected: Expectation<'tcx>) {
2689 check_expr_with_unifier(
2690 fcx, expr, expected, NoPreference,
/// Like `check_expr_with_expectation`, but additionally threads through an
/// lvalue preference (e.g. `PreferMutLvalue` when a mutable place is needed).
2694 fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2695 expr: &'tcx hir::Expr,
2696 expected: Expectation<'tcx>,
2697 lvalue_pref: LvaluePreference)
2699 check_expr_with_unifier(fcx, expr, expected, lvalue_pref, || ())
/// Checks `expr` with no expectation, no lvalue preference, and no unifier.
2702 fn check_expr<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, expr: &'tcx hir::Expr) {
2703 check_expr_with_unifier(fcx, expr, NoExpectation, NoPreference, || ())
/// Checks `expr` with no expectation but with the caller's lvalue preference.
2706 fn check_expr_with_lvalue_pref<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, expr: &'tcx hir::Expr,
2707 lvalue_pref: LvaluePreference) {
2708 check_expr_with_unifier(fcx, expr, NoExpectation, lvalue_pref, || ())
2711 // determine the `self` type, using fresh variables for all variables
2712 // declared on the impl declaration e.g., `impl<A,B> for Vec<(A,B)>`
2713 // would return ($0, $1) where $0 and $1 are freshly instantiated type
2715 pub fn impl_self_ty<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2716 span: Span, // (potential) receiver for this impl
2718 -> TypeAndSubsts<'tcx> {
2719 let tcx = fcx.tcx();
2721 let ity = tcx.lookup_item_type(did);
// Generic parameters of the impl (TypeSpace = the impl's own params).
2722 let (tps, rps, raw_ty) =
2723 (ity.generics.types.get_slice(subst::TypeSpace),
2724 ity.generics.regions.get_slice(subst::TypeSpace),
2727 debug!("impl_self_ty: tps={:?} rps={:?} raw_ty={:?}", tps, rps, raw_ty);
// Build a substitution mapping each impl parameter to a fresh inference
// variable: region variables first, then type variables are pushed into
// `substs` in place.
2729 let rps = fcx.inh.infcx.region_vars_for_defs(span, rps);
2730 let mut substs = subst::Substs::new(
2731 VecPerParamSpace::empty(),
2732 VecPerParamSpace::new(rps, Vec::new(), Vec::new()));
2733 fcx.inh.infcx.type_vars_for_defs(span, ParamSpace::TypeSpace, &mut substs, tps);
// Apply the fresh substitution to the impl's declared self type.
2734 let substd_ty = fcx.instantiate_type_scheme(span, &substs, &raw_ty);
2736 TypeAndSubsts { substs: substs, ty: substd_ty }
2739 /// Controls whether the arguments are tupled. This is used for the call
2742 /// Tupling means that all call-side arguments are packed into a tuple and
2743 /// passed as a single parameter. For example, if tupling is enabled, this
2746 /// fn f(x: (isize, isize))
2748 /// Can be called as:
2755 #[derive(Clone, Eq, PartialEq)]
// Variants `DontTupleArguments` and `TupleArguments` are matched on
// throughout the argument-checking code above.
2756 enum TupleArgumentsFlag {
2761 /// Unifies the return type with the expected type early, for more coercions
2762 /// and forward type information on the argument expressions.
2763 fn expected_types_for_fn_args<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2765 expected_ret: Expectation<'tcx>,
2766 formal_ret: ty::FnOutput<'tcx>,
2767 formal_args: &[Ty<'tcx>])
// Only a concrete expected type for a converging call can seed argument
// expectations; region side effects are rolled back on failure by
// `commit_regions_if_ok`.
2769 let expected_args = expected_ret.only_has_type(fcx).and_then(|ret_ty| {
2770 if let ty::FnConverging(formal_ret_ty) = formal_ret {
2771 fcx.infcx().commit_regions_if_ok(|| {
2772 // Attempt to apply a subtyping relationship between the formal
2773 // return type (likely containing type variables if the function
2774 // is polymorphic) and the expected return type.
2775 // No argument expectations are produced if unification fails.
2776 let origin = infer::Misc(call_span);
2777 let ures = fcx.infcx().sub_types(false, origin, formal_ret_ty, ret_ty);
2778 // FIXME(#15760) can't use try! here, FromError doesn't default
2779 // to identity so the resulting type is not constrained.
2780 if let Err(e) = ures {
2784 // Record all the argument types, with the substitutions
2785 // produced from the above subtyping unification.
2786 Ok(formal_args.iter().map(|ty| {
2787 fcx.infcx().resolve_type_vars_if_possible(ty)
2793 }).unwrap_or(vec![]);
2794 debug!("expected_types_for_fn_args(formal={:?} -> {:?}, expected={:?} -> {:?})",
2795 formal_args, formal_ret,
2796 expected_args, expected_ret);
2801 /// If an expression has any sub-expressions that result in a type error,
2802 /// inspecting that expression's type with `ty.references_error()` will return
2803 /// true. Likewise, if an expression is known to diverge, inspecting its
2804 /// type with `ty::type_is_bot` will return true (n.b.: since Rust is
2805 /// strict, _|_ can appear in the type of an expression that does not,
2806 /// itself, diverge: for example, fn() -> _|_.)
2807 /// Note that inspecting a type's structure *directly* may expose the fact
2808 /// that there are actually multiple representations for `TyError`, so avoid
2809 /// that when err needs to be handled differently.
2810 fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
2811 expr: &'tcx hir::Expr,
2812 expected: Expectation<'tcx>,
2813 lvalue_pref: LvaluePreference,
2817 debug!(">> typechecking: expr={:?} expected={:?}",
2820 // Checks a method call.
2821 fn check_method_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2822 expr: &'tcx hir::Expr,
2823 method_name: hir::SpannedIdent,
2824 args: &'tcx [P<hir::Expr>],
2826 expected: Expectation<'tcx>,
2827 lvalue_pref: LvaluePreference) {
// The receiver is stored as the first element of `args` in the HIR.
2828 let rcvr = &*args[0];
2829 check_expr_with_lvalue_pref(fcx, &*rcvr, lvalue_pref);
2831 // no need to check for bot/err -- callee does that
2832 let expr_t = structurally_resolved_type(fcx,
2834 fcx.expr_ty(&*rcvr));
// Convert any explicitly-supplied method type parameters from AST to types.
2836 let tps = tps.iter().map(|ast_ty| fcx.to_ty(&**ast_ty)).collect::<Vec<_>>();
2837 let fn_ty = match method::lookup(fcx,
2839 method_name.node.name,
// Success: record the resolved callee in the method map for later passes.
2845 let method_ty = method.ty;
2846 let method_call = MethodCall::expr(expr.id);
2847 fcx.inh.tables.borrow_mut().method_map.insert(method_call, method);
// Failure: report the lookup error and mark the expression as erroneous.
2851 method::report_error(fcx, method_name.span, expr_t,
2852 method_name.node.name, Some(rcvr), error);
2853 fcx.write_error(expr.id);
2858 // Call the generic checker.
2859 let ret_ty = check_method_argument_types(fcx,
2867 write_call(fcx, expr, ret_ty);
2870 // A generic function for checking the then and else in an if
2872 fn check_then_else<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2873 cond_expr: &'tcx hir::Expr,
2874 then_blk: &'tcx hir::Block,
2875 opt_else_expr: Option<&'tcx hir::Expr>,
2878 expected: Expectation<'tcx>) {
// The condition must be exactly `bool` (no coercion from other types).
2879 check_expr_has_type(fcx, cond_expr, fcx.tcx().types.bool);
2881 let expected = expected.adjust_for_branches(fcx);
2882 check_block_with_expected(fcx, then_blk, expected);
2883 let then_ty = fcx.node_ty(then_blk.id);
// The `if` expression's type is the common supertype of both branches;
// with no `else`, a different inference origin is used so the diagnostic
// can explain that the missing branch has type `()`.
2885 let branches_ty = match opt_else_expr {
2886 Some(ref else_expr) => {
2887 check_expr_with_expectation(fcx, &**else_expr, expected);
2888 let else_ty = fcx.expr_ty(&**else_expr);
2889 infer::common_supertype(fcx.infcx(),
2890 infer::IfExpression(sp),
2896 infer::common_supertype(fcx.infcx(),
2897 infer::IfExpressionWithNoElse(sp),
// Propagate an erroneous condition type into the whole `if` expression.
2904 let cond_ty = fcx.expr_ty(cond_expr);
2905 let if_ty = if cond_ty.references_error() {
2911 fcx.write_ty(id, if_ty);
2914 // Check field access expressions
2915 fn check_field<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
2916 expr: &'tcx hir::Expr,
2917 lvalue_pref: LvaluePreference,
2918 base: &'tcx hir::Expr,
2919 field: &hir::SpannedIdent) {
2920 let tcx = fcx.ccx.tcx;
2921 check_expr_with_lvalue_pref(fcx, base, lvalue_pref);
2922 let expr_t = structurally_resolved_type(fcx, expr.span,
2924 // FIXME(eddyb) #12808 Integrate privacy into this auto-deref loop.
// Autoderef the base, looking for a struct with a field of this name.
2925 let (_, autoderefs, field_ty) = autoderef(fcx,
2929 UnresolvedTypeAction::Error,
2933 ty::TyStruct(base_def, substs) => {
2934 debug!("struct named {:?}", base_t);
2935 base_def.struct_variant()
2936 .find_field_named(field.node.name)
2937 .map(|f| fcx.field_ty(expr.span, f, substs))
// Success: record the field's type and the autoderef adjustment on the base.
2944 fcx.write_ty(expr.id, field_ty);
2945 fcx.write_autoderef_adjustment(base.id, autoderefs);
// No such field. If a *method* of that name exists, suggest calling it.
2951 if method::exists(fcx, field.span, field.node.name, expr_t, expr.id) {
2952 fcx.type_error_message(
2955 format!("attempted to take value of method `{}` on type \
2956 `{}`", field.node, actual)
2960 tcx.sess.fileline_help(field.span,
2961 "maybe a `()` to call it is missing? \
2962 If not, try an anonymous function");
// Otherwise report a missing-field error, with fuzzy name suggestions
// when the base is a struct.
2964 fcx.type_error_message(
2967 format!("attempted access of field `{}` on \
2968 type `{}`, but no field with that \
2974 if let ty::TyStruct(def, _) = expr_t.sty {
2975 suggest_field_names(def.struct_variant(), field, tcx, vec![]);
2979 fcx.write_error(expr.id);
2982 // displays hints about the closest matches in field names
2983 fn suggest_field_names<'tcx>(variant: ty::VariantDef<'tcx>,
2984 field: &hir::SpannedIdent,
2985 tcx: &ty::ctxt<'tcx>,
2986 skip : Vec<InternedString>) {
2987 let name = field.node.name.as_str();
2988 // only find fits with at least one matching letter
// Starting the threshold at `name.len()` means a candidate is suggested only
// if its Levenshtein distance is strictly smaller than the typed name's
// length (i.e. at least one character effectively matches).
2989 let mut best_dist = name.len();
2990 let mut best = None;
2991 for elem in &variant.fields {
2992 let n = elem.name.as_str();
2993 // ignore already set fields
2994 if skip.iter().any(|x| *x == n) {
2997 // ignore private fields from non-local crates
2998 if variant.did.krate != LOCAL_CRATE && elem.vis != Visibility::Public {
3001 let dist = lev_distance(&n, &name);
3002 if dist < best_dist {
3007 if let Some(n) = best {
3008 tcx.sess.span_help(field.span,
3009 &format!("did you mean `{}`?", n));
3013 // Check tuple index expressions
3014 fn check_tup_field<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
3015 expr: &'tcx hir::Expr,
3016 lvalue_pref: LvaluePreference,
3017 base: &'tcx hir::Expr,
3018 idx: codemap::Spanned<usize>) {
3019 check_expr_with_lvalue_pref(fcx, base, lvalue_pref);
3020 let expr_t = structurally_resolved_type(fcx, expr.span,
// `tuple_like` distinguishes "index out of bounds" from "not a tuple at all"
// in the error messages below.
3022 let mut tuple_like = false;
3023 // FIXME(eddyb) #12808 Integrate privacy into this auto-deref loop.
// Autoderef the base, accepting either a tuple struct or a plain tuple.
3024 let (_, autoderefs, field_ty) = autoderef(fcx,
3028 UnresolvedTypeAction::Error,
3032 ty::TyStruct(base_def, substs) => {
3033 tuple_like = base_def.struct_variant().is_tuple_struct();
3035 debug!("tuple struct named {:?}", base_t);
3036 base_def.struct_variant()
3039 .map(|f| fcx.field_ty(expr.span, f, substs))
3044 ty::TyTuple(ref v) => {
// Bounds-check the constant index against the tuple's arity.
3046 if idx.node < v.len() { Some(v[idx.node]) } else { None }
3053 fcx.write_ty(expr.id, field_ty);
3054 fcx.write_autoderef_adjustment(base.id, autoderefs);
3059 fcx.type_error_message(
3063 format!("attempted out-of-bounds tuple index `{}` on \
3068 format!("attempted tuple index `{}` on type `{}`, but the \
3069 type was not a tuple or tuple struct",
3076 fcx.write_error(expr.id);
/// Reports an unknown field name in a struct (or struct-variant) literal, and
/// suggests close matches among the fields not already specified.
3079 fn report_unknown_field<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
3081 variant: ty::VariantDef<'tcx>,
3083 skip_fields: &[hir::Field]) {
// Word the error differently for enum struct-variants vs. plain structs.
3084 fcx.type_error_message(
3086 |actual| if let ty::TyEnum(..) = ty.sty {
3087 format!("struct variant `{}::{}` has no field named `{}`",
3088 actual, variant.name.as_str(), field.ident.node)
3090 format!("structure `{}` has no field named `{}`",
3091 actual, field.ident.node)
3095 // prevent all specified fields from being suggested
3096 let skip_fields = skip_fields.iter().map(|ref x| x.ident.node.name.as_str());
3097 suggest_field_names(variant, &field.ident, fcx.tcx(), skip_fields.collect());
/// Type-checks the field initializers of a struct (or struct-variant)
/// literal against the variant's declared fields: flags duplicate (E0062),
/// unknown, and — when `check_completeness` is set — missing (E0063) fields.
3101 fn check_expr_struct_fields<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
3104 variant: ty::VariantDef<'tcx>,
3105 ast_fields: &'tcx [hir::Field],
3106 check_completeness: bool) {
3107 let tcx = fcx.ccx.tcx;
3108 let substs = match adt_ty.sty {
3109 ty::TyStruct(_, substs) | ty::TyEnum(_, substs) => substs,
3110 _ => tcx.sess.span_bug(span, "non-ADT passed to check_expr_struct_fields")
// Track which declared fields have not yet been initialized; entries are
// removed as each AST field is matched.
3113 let mut remaining_fields = FnvHashMap();
3114 for field in &variant.fields {
3115 remaining_fields.insert(field.name, field);
3118 let mut error_happened = false;
3120 // Typecheck each field.
3121 for field in ast_fields {
3122 let expected_field_type;
3124 if let Some(v_field) = remaining_fields.remove(&field.ident.node.name) {
3125 expected_field_type = fcx.field_ty(field.span, v_field, substs);
// A name already removed from `remaining_fields` means a duplicate
// initializer (E0062); a name never present means an unknown field.
3127 error_happened = true;
3128 expected_field_type = tcx.types.err;
3129 if let Some(_) = variant.find_field_named(field.ident.node.name) {
3130 span_err!(fcx.tcx().sess, field.ident.span, E0062,
3131 "field `{}` specified more than once",
3134 report_unknown_field(fcx, adt_ty, variant, field, ast_fields);
3138 // Make sure to give a type to the field even if there's
3139 // an error, so we can continue typechecking
3140 check_expr_coercable_to_type(fcx, &*field.expr, expected_field_type);
3143 // Make sure the programmer specified all the fields.
3144 if check_completeness &&
3146 !remaining_fields.is_empty()
3148 span_err!(tcx.sess, span, E0063,
3149 "missing field{}: {}",
3150 if remaining_fields.len() == 1 {""} else {"s"},
3151 remaining_fields.keys()
3152 .map(|n| format!("`{}`", n))
3153 .collect::<Vec<_>>()
/// Error-recovery path for a struct literal whose path did not resolve to a
/// struct: still walks every field initializer (and the functional-update
/// base, if any) so all sub-expressions get types and later passes don't ICE.
3159 fn check_struct_fields_on_error<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
3161 fields: &'tcx [hir::Field],
3162 base_expr: &'tcx Option<P<hir::Expr>>) {
3163 // Make sure to still write the types
3164 // otherwise we might ICE
3165 fcx.write_error(id);
3166 for field in fields {
3167 check_expr(fcx, &*field.expr);
3170 Some(ref base) => check_expr(fcx, &**base),
/// Type-checks a struct literal expression `Path { field: expr, .. }`:
/// resolves the path to an ADT variant (E0071 if it is not one), checks the
/// fields, and validates any functional-record-update base expression.
3175 fn check_expr_struct<'a, 'tcx>(fcx: &FnCtxt<'a,'tcx>,
3178 fields: &'tcx [hir::Field],
3179 base_expr: &'tcx Option<P<hir::Expr>>)
3181 let tcx = fcx.tcx();
3183 // Find the relevant variant
3184 let def = lookup_full_def(tcx, path.span, expr.id);
3185 let (adt, variant) = match fcx.def_struct_variant(def) {
3186 Some((adt, variant)) => (adt, variant),
3188 span_err!(fcx.tcx().sess, path.span, E0071,
3189 "`{}` does not name a structure",
3190 pprust::path_to_string(path));
// Recover by checking the fields against error types; see helper above.
3191 check_struct_fields_on_error(fcx, expr.id, fields, base_expr);
3196 let expr_ty = fcx.instantiate_type(def.def_id(), path);
3197 fcx.write_ty(expr.id, expr_ty);
// Completeness (all fields present) is only required when there is no
// `..base` functional-update expression.
3199 check_expr_struct_fields(fcx, expr_ty, expr.span, variant, fields,
3200 base_expr.is_none());
3202 if let &Some(ref base_expr) = base_expr {
3203 check_expr_has_type(fcx, base_expr, expr_ty);
// `..base` syntax is only meaningful for structs, not enum variants (E0436).
3204 if adt.adt_kind() == ty::AdtKind::Enum {
3205 span_err!(tcx.sess, base_expr.span, E0436,
3206 "functional record update syntax requires a struct");
3211 type ExprCheckerWithTy = fn(&FnCtxt, &hir::Expr, Ty);
3213 let tcx = fcx.ccx.tcx;
3216 hir::ExprBox(ref opt_place, ref subexpr) => {
3217 opt_place.as_ref().map(|place|check_expr(fcx, &**place));
3218 check_expr(fcx, &**subexpr);
3220 let mut checked = false;
3221 opt_place.as_ref().map(|place| match place.node {
3222 hir::ExprPath(None, ref path) => {
3223 // FIXME(pcwalton): For now we hardcode the only permissible
3224 // place: the exchange heap.
3225 let definition = lookup_full_def(tcx, path.span, place.id);
3226 let def_id = definition.def_id();
3227 let referent_ty = fcx.expr_ty(&**subexpr);
3228 if tcx.lang_items.exchange_heap() == Some(def_id) {
3229 fcx.write_ty(id, tcx.mk_box(referent_ty));
3237 span_err!(tcx.sess, expr.span, E0066,
3238 "only the exchange heap is currently supported");
3239 fcx.write_ty(id, tcx.types.err);
3243 hir::ExprLit(ref lit) => {
3244 let typ = check_lit(fcx, &**lit, expected);
3245 fcx.write_ty(id, typ);
3247 hir::ExprBinary(op, ref lhs, ref rhs) => {
3248 op::check_binop(fcx, expr, op, lhs, rhs);
3250 hir::ExprAssignOp(op, ref lhs, ref rhs) => {
3251 op::check_binop_assign(fcx, expr, op, lhs, rhs);
3253 hir::ExprUnary(unop, ref oprnd) => {
3254 let expected_inner = expected.to_option(fcx).map_or(NoExpectation, |ty| {
3256 hir::UnUniq => match ty.sty {
3258 Expectation::rvalue_hint(tcx, ty)
3264 hir::UnNot | hir::UnNeg => {
3272 let lvalue_pref = match unop {
3273 hir::UnDeref => lvalue_pref,
3276 check_expr_with_expectation_and_lvalue_pref(
3277 fcx, &**oprnd, expected_inner, lvalue_pref);
3278 let mut oprnd_t = fcx.expr_ty(&**oprnd);
3280 if !oprnd_t.references_error() {
3283 oprnd_t = tcx.mk_box(oprnd_t);
3286 oprnd_t = structurally_resolved_type(fcx, expr.span, oprnd_t);
3287 oprnd_t = match oprnd_t.builtin_deref(true, NoPreference) {
3289 None => match try_overloaded_deref(fcx, expr.span,
3290 Some(MethodCall::expr(expr.id)),
3291 Some(&**oprnd), oprnd_t, lvalue_pref) {
3294 fcx.type_error_message(expr.span, |actual| {
3295 format!("type `{}` cannot be \
3296 dereferenced", actual)
3304 oprnd_t = structurally_resolved_type(fcx, oprnd.span,
3306 if !(oprnd_t.is_integral() || oprnd_t.sty == ty::TyBool) {
3307 oprnd_t = op::check_user_unop(fcx, "!", "not",
3308 tcx.lang_items.not_trait(),
3309 expr, &**oprnd, oprnd_t, unop);
3313 oprnd_t = structurally_resolved_type(fcx, oprnd.span,
3315 if !(oprnd_t.is_integral() || oprnd_t.is_fp()) {
3316 oprnd_t = op::check_user_unop(fcx, "-", "neg",
3317 tcx.lang_items.neg_trait(),
3318 expr, &**oprnd, oprnd_t, unop);
3323 fcx.write_ty(id, oprnd_t);
3325 hir::ExprAddrOf(mutbl, ref oprnd) => {
3326 let hint = expected.only_has_type(fcx).map_or(NoExpectation, |ty| {
3328 ty::TyRef(_, ref mt) | ty::TyRawPtr(ref mt) => {
3329 if fcx.tcx().expr_is_lval(&**oprnd) {
3330 // Lvalues may legitimately have unsized types.
3331 // For example, dereferences of a fat pointer and
3332 // the last field of a struct can be unsized.
3333 ExpectHasType(mt.ty)
3335 Expectation::rvalue_hint(tcx, mt.ty)
3341 let lvalue_pref = LvaluePreference::from_mutbl(mutbl);
3342 check_expr_with_expectation_and_lvalue_pref(fcx,
3347 let tm = ty::TypeAndMut { ty: fcx.expr_ty(&**oprnd), mutbl: mutbl };
3348 let oprnd_t = if tm.ty.references_error() {
3351 // Note: at this point, we cannot say what the best lifetime
3352 // is to use for resulting pointer. We want to use the
3353 // shortest lifetime possible so as to avoid spurious borrowck
3354 // errors. Moreover, the longest lifetime will depend on the
3355 // precise details of the value whose address is being taken
3356 // (and how long it is valid), which we don't know yet until type
3357 // inference is complete.
3359 // Therefore, here we simply generate a region variable. The
3360 // region inferencer will then select the ultimate value.
3361 // Finally, borrowck is charged with guaranteeing that the
3362 // value whose address was taken can actually be made to live
3363 // as long as it needs to live.
3364 let region = fcx.infcx().next_region_var(infer::AddrOfRegion(expr.span));
3365 tcx.mk_ref(tcx.mk_region(region), tm)
3367 fcx.write_ty(id, oprnd_t);
3369 hir::ExprPath(ref maybe_qself, ref path) => {
3370 let opt_self_ty = maybe_qself.as_ref().map(|qself| {
3371 fcx.to_ty(&qself.ty)
3374 let path_res = if let Some(&d) = tcx.def_map.borrow().get(&id) {
3376 } else if let Some(hir::QSelf { position: 0, .. }) = *maybe_qself {
3377 // Create some fake resolution that can't possibly be a type.
3378 def::PathResolution {
3379 base_def: def::DefMod(DefId::local(ast::CRATE_NODE_ID)),
3380 last_private: LastMod(AllPublic),
3381 depth: path.segments.len()
3384 tcx.sess.span_bug(expr.span,
3385 &format!("unbound path {:?}", expr))
3388 if let Some((opt_ty, segments, def)) =
3389 resolve_ty_and_def_ufcs(fcx, path_res, opt_self_ty, path,
3390 expr.span, expr.id) {
3391 let (scheme, predicates) = type_scheme_and_predicates_for_def(fcx,
3394 instantiate_path(fcx,
3404 // We always require that the type provided as the value for
3405 // a type parameter outlives the moment of instantiation.
3406 fcx.opt_node_ty_substs(expr.id, |item_substs| {
3407 fcx.add_wf_bounds(&item_substs.substs, expr);
3410 hir::ExprInlineAsm(ref ia) => {
3411 for &(_, ref input) in &ia.inputs {
3412 check_expr(fcx, &**input);
3414 for &(_, ref out, _) in &ia.outputs {
3415 check_expr(fcx, &**out);
3419 hir::ExprBreak(_) => { fcx.write_ty(id, fcx.infcx().next_diverging_ty_var()); }
3420 hir::ExprAgain(_) => { fcx.write_ty(id, fcx.infcx().next_diverging_ty_var()); }
3421 hir::ExprRet(ref expr_opt) => {
3423 ty::FnConverging(result_type) => {
3426 if let Err(_) = fcx.mk_eqty(false, infer::Misc(expr.span),
3427 result_type, fcx.tcx().mk_nil()) {
3428 span_err!(tcx.sess, expr.span, E0069,
3429 "`return;` in a function whose return type is \
3433 check_expr_coercable_to_type(fcx, &**e, result_type);
3437 ty::FnDiverging => {
3438 if let Some(ref e) = *expr_opt {
3439 check_expr(fcx, &**e);
3441 span_err!(tcx.sess, expr.span, E0166,
3442 "`return` in a function declared as diverging");
3445 fcx.write_ty(id, fcx.infcx().next_diverging_ty_var());
3447 hir::ExprParen(ref a) => {
3448 check_expr_with_expectation_and_lvalue_pref(fcx,
3452 fcx.write_ty(id, fcx.expr_ty(&**a));
3454 hir::ExprAssign(ref lhs, ref rhs) => {
3455 check_expr_with_lvalue_pref(fcx, &**lhs, PreferMutLvalue);
3457 let tcx = fcx.tcx();
3458 if !tcx.expr_is_lval(&**lhs) {
3459 span_err!(tcx.sess, expr.span, E0070,
3460 "invalid left-hand side expression");
3463 let lhs_ty = fcx.expr_ty(&**lhs);
3464 check_expr_coercable_to_type(fcx, &**rhs, lhs_ty);
3465 let rhs_ty = fcx.expr_ty(&**rhs);
3467 fcx.require_expr_have_sized_type(&**lhs, traits::AssignmentLhsSized);
3469 if lhs_ty.references_error() || rhs_ty.references_error() {
3470 fcx.write_error(id);
3475 hir::ExprIf(ref cond, ref then_blk, ref opt_else_expr) => {
3476 check_then_else(fcx, &**cond, &**then_blk, opt_else_expr.as_ref().map(|e| &**e),
3477 id, expr.span, expected);
3479 hir::ExprWhile(ref cond, ref body, _) => {
3480 check_expr_has_type(fcx, &**cond, tcx.types.bool);
3481 check_block_no_value(fcx, &**body);
3482 let cond_ty = fcx.expr_ty(&**cond);
3483 let body_ty = fcx.node_ty(body.id);
3484 if cond_ty.references_error() || body_ty.references_error() {
3485 fcx.write_error(id);
3491 hir::ExprLoop(ref body, _) => {
3492 check_block_no_value(fcx, &**body);
3493 if !may_break(tcx, expr.id, &**body) {
3494 fcx.write_ty(id, fcx.infcx().next_diverging_ty_var());
3499 hir::ExprMatch(ref discrim, ref arms, match_src) => {
3500 _match::check_match(fcx, expr, &**discrim, arms, expected, match_src);
3502 hir::ExprClosure(capture, ref decl, ref body) => {
3503 closure::check_expr_closure(fcx, expr, capture, &**decl, &**body, expected);
3505 hir::ExprBlock(ref b) => {
3506 check_block_with_expected(fcx, &**b, expected);
3507 fcx.write_ty(id, fcx.node_ty(b.id));
3509 hir::ExprCall(ref callee, ref args) => {
3510 callee::check_call(fcx, expr, &**callee, &args[..], expected);
3512 // we must check that return type of called functions is WF:
3513 let ret_ty = fcx.expr_ty(expr);
3514 fcx.register_wf_obligation(ret_ty, expr.span, traits::MiscObligation);
3516 hir::ExprMethodCall(ident, ref tps, ref args) => {
3517 check_method_call(fcx, expr, ident, &args[..], &tps[..], expected, lvalue_pref);
3518 let arg_tys = args.iter().map(|a| fcx.expr_ty(&**a));
3519 let args_err = arg_tys.fold(false, |rest_err, a| rest_err || a.references_error());
3521 fcx.write_error(id);
3524 hir::ExprCast(ref e, ref t) => {
3525 if let hir::TyFixedLengthVec(_, ref count_expr) = t.node {
3526 check_expr_with_hint(fcx, &**count_expr, tcx.types.usize);
3529 // Find the type of `e`. Supply hints based on the type we are casting to,
3531 let t_cast = fcx.to_ty(t);
3532 let t_cast = structurally_resolved_type(fcx, expr.span, t_cast);
3533 check_expr_with_expectation(fcx, e, ExpectCastableToType(t_cast));
3534 let t_expr = fcx.expr_ty(e);
3536 // Eagerly check for some obvious errors.
3537 if t_expr.references_error() {
3538 fcx.write_error(id);
3539 } else if !fcx.type_is_known_to_be_sized(t_cast, expr.span) {
3540 report_cast_to_unsized_type(fcx, expr.span, t.span, e.span, t_cast, t_expr, id);
3542 // Write a type for the whole expression, assuming everything is going
3544 fcx.write_ty(id, t_cast);
3546 // Defer other checks until we're done type checking.
3547 let mut deferred_cast_checks = fcx.inh.deferred_cast_checks.borrow_mut();
3548 let cast_check = cast::CastCheck::new((**e).clone(), t_expr, t_cast, expr.span);
3549 deferred_cast_checks.push(cast_check);
3552 hir::ExprVec(ref args) => {
3553 let uty = expected.to_option(fcx).and_then(|uty| {
3555 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3560 let typ = match uty {
3563 check_expr_coercable_to_type(fcx, &**e, uty);
3568 let t: Ty = fcx.infcx().next_ty_var();
3570 check_expr_has_type(fcx, &**e, t);
3575 let typ = tcx.mk_array(typ, args.len());
3576 fcx.write_ty(id, typ);
3578 hir::ExprRepeat(ref element, ref count_expr) => {
3579 check_expr_has_type(fcx, &**count_expr, tcx.types.usize);
3580 let count = fcx.tcx().eval_repeat_count(&**count_expr);
3582 let uty = match expected {
3583 ExpectHasType(uty) => {
3585 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3592 let (element_ty, t) = match uty {
3594 check_expr_coercable_to_type(fcx, &**element, uty);
3598 let t: Ty = fcx.infcx().next_ty_var();
3599 check_expr_has_type(fcx, &**element, t);
3600 (fcx.expr_ty(&**element), t)
3605 // For [foo, ..n] where n > 1, `foo` must have
3607 fcx.require_type_meets(
3614 if element_ty.references_error() {
3615 fcx.write_error(id);
3617 let t = tcx.mk_array(t, count);
3618 fcx.write_ty(id, t);
3621 hir::ExprTup(ref elts) => {
3622 let flds = expected.only_has_type(fcx).and_then(|ty| {
3624 ty::TyTuple(ref flds) => Some(&flds[..]),
3628 let mut err_field = false;
3630 let elt_ts = elts.iter().enumerate().map(|(i, e)| {
3631 let t = match flds {
3632 Some(ref fs) if i < fs.len() => {
3634 check_expr_coercable_to_type(fcx, &**e, ety);
3638 check_expr_with_expectation(fcx, &**e, NoExpectation);
3642 err_field = err_field || t.references_error();
3646 fcx.write_error(id);
3648 let typ = tcx.mk_tup(elt_ts);
3649 fcx.write_ty(id, typ);
3652 hir::ExprStruct(ref path, ref fields, ref base_expr) => {
3653 check_expr_struct(fcx, expr, path, fields, base_expr);
3655 fcx.require_expr_have_sized_type(expr, traits::StructInitializerSized);
3657 hir::ExprField(ref base, ref field) => {
3658 check_field(fcx, expr, lvalue_pref, &**base, field);
3660 hir::ExprTupField(ref base, idx) => {
3661 check_tup_field(fcx, expr, lvalue_pref, &**base, idx);
3663 hir::ExprIndex(ref base, ref idx) => {
3664 check_expr_with_lvalue_pref(fcx, &**base, lvalue_pref);
3665 check_expr(fcx, &**idx);
3667 let base_t = fcx.expr_ty(&**base);
3668 let idx_t = fcx.expr_ty(&**idx);
3670 if base_t.references_error() {
3671 fcx.write_ty(id, base_t);
3672 } else if idx_t.references_error() {
3673 fcx.write_ty(id, idx_t);
3675 let base_t = structurally_resolved_type(fcx, expr.span, base_t);
3676 match lookup_indexing(fcx, expr, base, base_t, idx_t, lvalue_pref) {
3677 Some((index_ty, element_ty)) => {
3678 let idx_expr_ty = fcx.expr_ty(idx);
3679 demand::eqtype(fcx, expr.span, index_ty, idx_expr_ty);
3680 fcx.write_ty(id, element_ty);
3683 check_expr_has_type(fcx, &**idx, fcx.tcx().types.err);
3684 fcx.type_error_message(
3687 format!("cannot index a value of type `{}`",
3692 fcx.write_ty(id, fcx.tcx().types.err);
3697 hir::ExprRange(ref start, ref end) => {
3698 let t_start = start.as_ref().map(|e| {
3699 check_expr(fcx, &**e);
3702 let t_end = end.as_ref().map(|e| {
3703 check_expr(fcx, &**e);
3707 let idx_type = match (t_start, t_end) {
3708 (Some(ty), None) | (None, Some(ty)) => {
3711 (Some(t_start), Some(t_end)) if (t_start.references_error() ||
3712 t_end.references_error()) => {
3713 Some(fcx.tcx().types.err)
3715 (Some(t_start), Some(t_end)) => {
3716 Some(infer::common_supertype(fcx.infcx(),
3717 infer::RangeExpression(expr.span),
3725 // Note that we don't check the type of start/end satisfy any
3726 // bounds because right now the range structs do not have any. If we add
3727 // some bounds, then we'll need to check `t_start` against them here.
3729 let range_type = match idx_type {
3730 Some(idx_type) if idx_type.references_error() => {
3734 // Find the did from the appropriate lang item.
3735 let did = match (start, end) {
3736 (&Some(_), &Some(_)) => tcx.lang_items.range_struct(),
3737 (&Some(_), &None) => tcx.lang_items.range_from_struct(),
3738 (&None, &Some(_)) => tcx.lang_items.range_to_struct(),
3740 tcx.sess.span_bug(expr.span, "full range should be dealt with above")
3744 if let Some(did) = did {
3745 let def = tcx.lookup_adt_def(did);
3746 let predicates = tcx.lookup_predicates(did);
3747 let substs = Substs::new_type(vec![idx_type], vec![]);
3748 let bounds = fcx.instantiate_bounds(expr.span, &substs, &predicates);
3749 fcx.add_obligations_for_parameters(
3750 traits::ObligationCause::new(expr.span,
3752 traits::ItemObligation(did)),
3755 tcx.mk_struct(def, tcx.mk_substs(substs))
3757 span_err!(tcx.sess, expr.span, E0236, "no lang item for range syntax");
3762 // Neither start nor end => RangeFull
3763 if let Some(did) = tcx.lang_items.range_full_struct() {
3765 tcx.lookup_adt_def(did),
3766 tcx.mk_substs(Substs::empty())
3769 span_err!(tcx.sess, expr.span, E0237, "no lang item for range syntax");
3775 fcx.write_ty(id, range_type);
3780 debug!("type of expr({}) {} is...", expr.id,
3781 pprust::expr_to_string(expr));
3782 debug!("... {:?}, expected is {:?}",
3789 pub fn resolve_ty_and_def_ufcs<'a, 'b, 'tcx>(fcx: &FnCtxt<'b, 'tcx>,
3790 path_res: def::PathResolution,
3791 opt_self_ty: Option<Ty<'tcx>>,
3792 path: &'a hir::Path,
3794 node_id: ast::NodeId)
3795 -> Option<(Option<Ty<'tcx>>,
3796 &'a [hir::PathSegment],
3800 // Associated constants can't depend on generic types.
3801 fn have_disallowed_generic_consts<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
3805 node_id: ast::NodeId) -> bool {
3807 def::DefAssociatedConst(..) => {
3808 if ty.has_param_types() || ty.has_self_ty() {
3809 span_err!(fcx.sess(), span, E0329,
3810 "Associated consts cannot depend \
3811 on type parameters or Self.");
3812 fcx.write_error(node_id);
3821 // If fully resolved already, we don't have to do anything.
3822 if path_res.depth == 0 {
3823 if let Some(ty) = opt_self_ty {
3824 if have_disallowed_generic_consts(fcx, path_res.full_def(), ty,
3829 Some((opt_self_ty, &path.segments, path_res.base_def))
3831 let mut def = path_res.base_def;
3832 let ty_segments = path.segments.split_last().unwrap().1;
3833 let base_ty_end = path.segments.len() - path_res.depth;
3834 let ty = astconv::finish_resolving_def_to_ty(fcx, fcx, span,
3835 PathParamMode::Optional,
3838 &ty_segments[..base_ty_end],
3839 &ty_segments[base_ty_end..]);
3840 let item_segment = path.segments.last().unwrap();
3841 let item_name = item_segment.identifier.name;
3842 match method::resolve_ufcs(fcx, span, item_name, ty, node_id) {
3844 if have_disallowed_generic_consts(fcx, def, ty, span, node_id) {
3847 // Write back the new resolution.
3848 fcx.ccx.tcx.def_map.borrow_mut()
3849 .insert(node_id, def::PathResolution {
3851 last_private: path_res.last_private.or(lp),
3854 Some((Some(ty), slice::ref_slice(item_segment), def))
3857 method::report_error(fcx, span, ty,
3858 item_name, None, error);
3859 fcx.write_error(node_id);
3866 impl<'tcx> Expectation<'tcx> {
3867 /// Provide an expectation for an rvalue expression given an *optional*
3868 /// hint, which is not required for type safety (the resulting type might
3869 /// be checked higher up, as is the case with `&expr` and `box expr`), but
3870 /// is useful in determining the concrete type.
3872 /// The primary use case is where the expected type is a fat pointer,
3873 /// like `&[isize]`. For example, consider the following statement:
3875 /// let x: &[isize] = &[1, 2, 3];
3877 /// In this case, the expected type for the `&[1, 2, 3]` expression is
3878 /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
3879 /// expectation `ExpectHasType([isize])`, that would be too strong --
3880 /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
3881 /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
3882 /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
3883 /// which still is useful, because it informs integer literals and the like.
3884 /// See the test case `test/run-pass/coerce-expect-unsized.rs` and #20169
3885 /// for examples of where this comes up,.
3886 fn rvalue_hint(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> {
// `struct_tail` looks through struct wrappers to the innermost field
// type; an unsized tail (slice or trait object) means the full type
// cannot be the expression's own type, so only a soft hint is given.
3887 match tcx.struct_tail(ty).sty {
3888 ty::TySlice(_) | ty::TyTrait(..) => {
3889 ExpectRvalueLikeUnsized(ty)
3891 _ => ExpectHasType(ty)
3895 // Resolves `expected` by a single level if it is a variable. If
3896 // there is no expected type or resolution is not possible (e.g.,
3897 // no constraints yet present), just returns `None`.
3898 fn resolve<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> {
// Each informative variant is rebuilt with its type run through the
// inference context so later comparisons see any known constraints.
3903 ExpectCastableToType(t) => {
3904 ExpectCastableToType(
3905 fcx.infcx().resolve_type_vars_if_possible(&t))
3907 ExpectHasType(t) => {
3909 fcx.infcx().resolve_type_vars_if_possible(&t))
3911 ExpectRvalueLikeUnsized(t) => {
3912 ExpectRvalueLikeUnsized(
3913 fcx.infcx().resolve_type_vars_if_possible(&t))
/// Resolves the expectation and converts it to an optional type hint:
/// every informative variant yields `Some(ty)`, `NoExpectation` yields
/// `None`.
3918 fn to_option<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> {
3919 match self.resolve(fcx) {
3920 NoExpectation => None,
3921 ExpectCastableToType(ty) |
3923 ExpectRvalueLikeUnsized(ty) => Some(ty),
/// Like `to_option`, but only the hard `ExpectHasType` constraint
/// produces a type; softer hints (castable-to, rvalue-like) are
/// discarded.
3927 fn only_has_type<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> {
3928 match self.resolve(fcx) {
3929 ExpectHasType(ty) => Some(ty),
// Type-checks the initializer expression of a `let` declaration against the
// type of the local's pattern.
3935 pub fn check_decl_initializer<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
3936 local: &'tcx hir::Local,
3937 init: &'tcx hir::Expr)
3939 let ref_bindings = fcx.tcx().pat_contains_ref_binding(&local.pat);
3941 let local_ty = fcx.local_ty(init.span, local.id);
3942 if let Some(m) = ref_bindings {
3943 // Somewhat subtle: if we have a `ref` binding in the pattern,
3944 // we want to avoid introducing coercions for the RHS. This is
3945 // both because it helps preserve sanity and, in the case of
3946 // ref mut, for soundness (issue #23116). In particular, in
3947 // the latter case, we need to be clear that the type of the
3948 // referent for the reference that results is *equal to* the
3949 // type of the lvalue it is referencing, and not some
3950 // supertype thereof.
3951 check_expr_with_lvalue_pref(fcx, init, LvaluePreference::from_mutbl(m));
3952 let init_ty = fcx.expr_ty(init);
3953 demand::eqtype(fcx, init.span, init_ty, local_ty);
// No `ref` bindings: the initializer may be freely coerced to the
// local's declared/inferred type.
3955 check_expr_coercable_to_type(fcx, init, local_ty)
// Type-checks a whole `let` declaration: records the local's type, checks
// the initializer (if any), and checks the pattern against the local's type.
// Errors in either initializer or pattern overwrite the local's recorded
// type with the error type so downstream checks do not cascade.
3959 pub fn check_decl_local<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, local: &'tcx hir::Local) {
3960 let tcx = fcx.ccx.tcx;
3962 let t = fcx.local_ty(local.span, local.id);
3963 fcx.write_ty(local.id, t);
3965 if let Some(ref init) = local.init {
3966 check_decl_initializer(fcx, local, &**init);
3967 let init_ty = fcx.expr_ty(&**init);
3968 if init_ty.references_error() {
3969 fcx.write_ty(local.id, init_ty);
3973 let pcx = pat_ctxt {
3975 map: pat_id_map(&tcx.def_map, &*local.pat),
3977 _match::check_pat(&pcx, &*local.pat, t);
3978 let pat_ty = fcx.node_ty(local.pat.id);
3979 if pat_ty.references_error() {
3980 fcx.write_ty(local.id, pat_ty);
// Type-checks a single statement. Tracks whether the statement diverges
// (`saw_bot`) or contains a type error (`saw_err`) and records the
// statement node's type accordingly: a fresh diverging type variable, the
// error type, or `()`.
3984 pub fn check_stmt<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, stmt: &'tcx hir::Stmt) {
3986 let mut saw_bot = false;
3987 let mut saw_err = false;
3989 hir::StmtDecl(ref decl, id) => {
3992 hir::DeclLocal(ref l) => {
3993 check_decl_local(fcx, &**l);
3994 let l_t = fcx.node_ty(l.id);
3995 saw_bot = saw_bot || fcx.infcx().type_var_diverges(l_t);
3996 saw_err = saw_err || l_t.references_error();
3998 hir::DeclItem(_) => {/* ignore for now */ }
4001 hir::StmtExpr(ref expr, id) => {
4003 // Check with expected type of ()
4004 check_expr_has_type(fcx, &**expr, fcx.tcx().mk_nil());
4005 let expr_ty = fcx.expr_ty(&**expr);
4006 saw_bot = saw_bot || fcx.infcx().type_var_diverges(expr_ty);
4007 saw_err = saw_err || expr_ty.references_error();
// Semicolon statements discard their value, so any expression type
// is acceptable here.
4009 hir::StmtSemi(ref expr, id) => {
4011 check_expr(fcx, &**expr);
4012 let expr_ty = fcx.expr_ty(&**expr);
4013 saw_bot |= fcx.infcx().type_var_diverges(expr_ty);
4014 saw_err |= expr_ty.references_error();
4018 fcx.write_ty(node_id, fcx.infcx().next_diverging_ty_var());
4021 fcx.write_error(node_id);
4024 fcx.write_nil(node_id)
// Type-checks a block that is required to produce no value (e.g. a loop
// body): the block is checked with expectation `()`, and unless it already
// errored, its type must be a subtype of `()`.
4028 pub fn check_block_no_value<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, blk: &'tcx hir::Block) {
4029 check_block_with_expected(fcx, blk, ExpectHasType(fcx.tcx().mk_nil()));
4030 let blkty = fcx.node_ty(blk.id);
4031 if blkty.references_error() {
4032 fcx.write_error(blk.id);
4034 let nilty = fcx.tcx().mk_nil();
4035 demand::suptype(fcx, blk.span, nilty, blkty);
// Type-checks a block against an expectation. Checks each statement in
// order, issuing the `unreachable_code` lint for statements (and the tail
// expression) that follow a diverging statement, then types the block from
// its tail expression (or as `()`/error/diverging when there is none).
4039 fn check_block_with_expected<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4040 blk: &'tcx hir::Block,
4041 expected: Expectation<'tcx>) {
// Save the current unsafety state and install the one appropriate for
// this block; it is restored at the end (`prev`).
4043 let mut fcx_ps = fcx.ps.borrow_mut();
4044 let unsafety_state = fcx_ps.recurse(blk);
4045 replace(&mut *fcx_ps, unsafety_state)
4048 let mut warned = false;
4049 let mut any_diverges = false;
4050 let mut any_err = false;
4051 for s in &blk.stmts {
4052 check_stmt(fcx, &**s);
4053 let s_id = ::rustc_front::util::stmt_id(&**s);
4054 let s_ty = fcx.node_ty(s_id);
// Warn (once per block) on the first statement that follows a
// diverging statement; item declarations are exempt.
4055 if any_diverges && !warned && match s.node {
4056 hir::StmtDecl(ref decl, _) => {
4058 hir::DeclLocal(_) => true,
4062 hir::StmtExpr(_, _) | hir::StmtSemi(_, _) => true,
4067 .add_lint(lint::builtin::UNREACHABLE_CODE,
4070 "unreachable statement".to_string());
4073 any_diverges = any_diverges || fcx.infcx().type_var_diverges(s_ty);
4074 any_err = any_err || s_ty.references_error();
// No tail expression: the block is `()`, unless an earlier statement
// errored or diverged.
4077 None => if any_err {
4078 fcx.write_error(blk.id);
4079 } else if any_diverges {
4080 fcx.write_ty(blk.id, fcx.infcx().next_diverging_ty_var());
4082 fcx.write_nil(blk.id);
4085 if any_diverges && !warned {
4089 .add_lint(lint::builtin::UNREACHABLE_CODE,
4092 "unreachable expression".to_string());
// Tail expression present: a hard type expectation allows coercion;
// any other expectation is just propagated through.
4094 let ety = match expected {
4095 ExpectHasType(ety) => {
4096 check_expr_coercable_to_type(fcx, &**e, ety);
4100 check_expr_with_expectation(fcx, &**e, expected);
4106 fcx.write_error(blk.id);
4107 } else if any_diverges {
4108 fcx.write_ty(blk.id, fcx.infcx().next_diverging_ty_var());
4110 fcx.write_ty(blk.id, ety);
// Restore the unsafety state saved on entry.
4115 *fcx.ps.borrow_mut() = prev;
4118 /// Checks a constant appearing in a type. At the moment this is just the
4119 /// length expression in a fixed-length vector, but someday it might be
4120 /// extended to type-level numeric literals.
4121 fn check_const_in_type<'a,'tcx>(ccx: &'a CrateCtxt<'a,'tcx>,
4122 expr: &'tcx hir::Expr,
4123 expected_type: Ty<'tcx>) {
// Constants in types have no enclosing function body, so build a fresh
// blank fn context over static inherited fields to check them in.
4124 let tables = RefCell::new(ty::Tables::empty());
4125 let inh = static_inherited_fields(ccx, &tables);
4126 let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(expected_type), expr.id);
4127 check_const_with_ty(&fcx, expr.span, expr, expected_type);
// Type-checks a `const`/`static` item body: builds a blank fn context for
// the item and checks the expression against the item's declared type.
4130 fn check_const<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
4134 let tables = RefCell::new(ty::Tables::empty());
4135 let inh = static_inherited_fields(ccx, &tables);
4136 let rty = ccx.tcx.node_id_to_type(id);
4137 let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(rty), e.id);
// The declared type comes from the item's recorded type scheme.
4138 let declty = fcx.ccx.tcx.lookup_item_type(DefId::local(id)).ty;
4139 check_const_with_ty(&fcx, sp, e, declty);
// Checks a constant expression against a known declared type, then runs the
// full back-end of type checking for it: obligation selection, regionck,
// and writeback of inferred types.
4142 fn check_const_with_ty<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4146 // Gather locals in statics (because of block expressions).
4147 // This is technically unnecessary because locals in static items are forbidden,
4148 // but prevents type checking from blowing up before const checking can properly
4150 GatherLocalsVisitor { fcx: fcx }.visit_expr(e);
4152 check_expr_with_hint(fcx, e, declty);
4153 demand::coerce(fcx, e.span, declty, e);
4154 fcx.select_all_obligations_or_error();
4156 regionck::regionck_expr(fcx, e);
4157 writeback::resolve_type_vars_in_expr(fcx, e);
4160 /// Checks whether a type can be represented in memory. In particular, it
4161 /// identifies types that contain themselves without indirection through a
4162 /// pointer, which would mean their size is unbounded.
4163 pub fn check_representable(tcx: &ty::ctxt,
4165 item_id: ast::NodeId,
4166 designation: &str) -> bool {
4167 let rty = tcx.node_id_to_type(item_id);
4169 // Check that it is possible to represent this type. This call identifies
4170 // (1) types that contain themselves and (2) types that contain a different
4171 // recursive type. It is only necessary to throw an error on those that
4172 // contain themselves. For case 2, there must be an inner type that will be
4173 // caught by case 1.
4174 match rty.is_representable(tcx, sp) {
4175 Representability::SelfRecursive => {
// E0072: a directly self-recursive type has unbounded size; suggest
// boxing the inner value.
4176 span_err!(tcx.sess, sp, E0072, "invalid recursive {} type", designation);
4177 tcx.sess.fileline_help(
4178 sp, "wrap the inner value in a box to make it representable");
4181 Representability::Representable | Representability::ContainsRecursive => (),
// Validates a `#[simd]`-style struct: it must be a non-empty struct whose
// fields are all the same type, and that element type must be a machine
// type (or a bare type parameter). Emits E0075/E0076/E0077 on violations.
4186 pub fn check_simd(tcx: &ty::ctxt, sp: Span, id: ast::NodeId) {
4187 let t = tcx.node_id_to_type(id);
4189 ty::TyStruct(def, substs) => {
4190 let fields = &def.struct_variant().fields;
4191 if fields.is_empty() {
4192 span_err!(tcx.sess, sp, E0075, "SIMD vector cannot be empty");
// Homogeneity: every field must have the exact type of the first.
4195 let e = fields[0].ty(tcx, substs);
4196 if !fields.iter().all(|f| f.ty(tcx, substs) == e) {
4197 span_err!(tcx.sess, sp, E0076, "SIMD vector should be homogeneous");
4201 ty::TyParam(_) => { /* struct<T>(T, T, T, T) is ok */ }
4202 _ if e.is_machine() => { /* struct(u8, u8, u8, u8) is ok */ }
4204 span_err!(tcx.sess, sp, E0077,
4205 "SIMD vector element type should be machine type");
// Checks an enum definition's variants: discriminant expressions are
// type-checked against the repr type, duplicate discriminant values are
// rejected (E0081), out-of-range values for an explicit repr are rejected
// (E0082), repr attributes on univariant/zero-variant enums are rejected
// (E0083/E0084), and finally the enum is checked for representability.
4214 pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
4216 vs: &'tcx [P<hir::Variant>],
// Returns true if `disr` fits in the integer type named by the repr hint.
4219 fn disr_in_range(ccx: &CrateCtxt,
4221 disr: ty::Disr) -> bool {
// The round-trip cast (`disr as uN as Disr == disr`) detects
// truncation for each candidate width.
4222 fn uint_in_range(ccx: &CrateCtxt, ty: hir::UintTy, disr: ty::Disr) -> bool {
4224 hir::TyU8 => disr as u8 as Disr == disr,
4225 hir::TyU16 => disr as u16 as Disr == disr,
4226 hir::TyU32 => disr as u32 as Disr == disr,
4227 hir::TyU64 => disr as u64 as Disr == disr,
// `usize` defers to the target's pointer-width integer type.
4228 hir::TyUs => uint_in_range(ccx, ccx.tcx.sess.target.uint_type, disr)
4231 fn int_in_range(ccx: &CrateCtxt, ty: hir::IntTy, disr: ty::Disr) -> bool {
4233 hir::TyI8 => disr as i8 as Disr == disr,
4234 hir::TyI16 => disr as i16 as Disr == disr,
4235 hir::TyI32 => disr as i32 as Disr == disr,
4236 hir::TyI64 => disr as i64 as Disr == disr,
4237 hir::TyIs => int_in_range(ccx, ccx.tcx.sess.target.int_type, disr)
4241 attr::UnsignedInt(ty) => uint_in_range(ccx, ty, disr),
4242 attr::SignedInt(ty) => int_in_range(ccx, ty, disr)
// Checks each variant's discriminant expression and value against the
// repr hint; reports duplicates and out-of-range discriminants.
4246 fn do_check<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
4247 vs: &'tcx [P<hir::Variant>],
4249 hint: attr::ReprAttr) {
4250 #![allow(trivial_numeric_casts)]
4252 let rty = ccx.tcx.node_id_to_type(id);
4253 let mut disr_vals: Vec<ty::Disr> = Vec::new();
4255 let tables = RefCell::new(ty::Tables::empty());
4256 let inh = static_inherited_fields(ccx, &tables);
4257 let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(rty), id);
4259 let (_, repr_type_ty) = ccx.tcx.enum_repr_type(Some(&hint));
// Explicit `Variant = expr` discriminants are constants typed at the
// enum's repr type.
4261 if let Some(ref e) = v.node.disr_expr {
4262 check_const_with_ty(&fcx, e.span, e, repr_type_ty);
4266 let def_id = DefId::local(id);
4268 let variants = &ccx.tcx.lookup_adt_def(def_id).variants;
4269 for (v, variant) in vs.iter().zip(variants.iter()) {
4270 let current_disr_val = variant.disr_val;
4272 // Check for duplicate discriminant values
4273 match disr_vals.iter().position(|&x| x == current_disr_val) {
4275 span_err!(ccx.tcx.sess, v.span, E0081,
4276 "discriminant value `{}` already exists", disr_vals[i]);
4277 span_note!(ccx.tcx.sess, ccx.tcx.map.span(variants[i].did.node),
4278 "conflicting discriminant here")
4282 // Check for unrepresentable discriminant values
4284 attr::ReprAny | attr::ReprExtern => (),
4285 attr::ReprInt(sp, ity) => {
4286 if !disr_in_range(ccx, ity, current_disr_val) {
4287 span_err!(ccx.tcx.sess, v.span, E0082,
4288 "discriminant value outside specified type");
4289 span_note!(ccx.tcx.sess, sp,
4290 "discriminant type specified here");
// ReprSimd/ReprPacked on an enum should have been rejected
// earlier, so reaching them here is a compiler bug.
4294 ccx.tcx.sess.bug("range_to_inttype: found ReprSimd on an enum");
4296 attr::ReprPacked => {
4297 ccx.tcx.sess.bug("range_to_inttype: found ReprPacked on an enum");
4300 disr_vals.push(current_disr_val);
// Only the first repr hint is honored; absence means ReprAny.
4304 let hint = *ccx.tcx.lookup_repr_hints(DefId { krate: LOCAL_CRATE, node: id })
4305 .get(0).unwrap_or(&attr::ReprAny);
4307 if hint != attr::ReprAny && vs.len() <= 1 {
4309 span_err!(ccx.tcx.sess, sp, E0083,
4310 "unsupported representation for univariant enum");
4312 span_err!(ccx.tcx.sess, sp, E0084,
4313 "unsupported representation for zero-variant enum");
4317 do_check(ccx, vs, id, hint);
4319 check_representable(ccx.tcx, sp, id, "enum");
4322 // Returns the type parameter count and the type for the given definition.
4323 fn type_scheme_and_predicates_for_def<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4326 -> (TypeScheme<'tcx>, GenericPredicates<'tcx>) {
// Locals and upvars are monomorphic: their scheme has empty generics and
// no predicates, with the type taken from the local-type table.
4328 def::DefLocal(nid) | def::DefUpvar(nid, _, _) => {
4329 let typ = fcx.local_ty(sp, nid);
4330 (ty::TypeScheme { generics: ty::Generics::empty(), ty: typ },
4331 ty::GenericPredicates::empty())
// Item-like values: look up the recorded type scheme and predicates.
4333 def::DefFn(id, _) | def::DefMethod(id) |
4334 def::DefStatic(id, _) | def::DefVariant(_, id, _) |
4335 def::DefStruct(id) | def::DefConst(id) | def::DefAssociatedConst(id) => {
4336 (fcx.tcx().lookup_item_type(id), fcx.tcx().lookup_predicates(id))
// Everything else is not a value and should never reach this function;
// treat it as a compiler bug.
4340 def::DefAssociatedTy(..) |
4342 def::DefTyParam(..) |
4344 def::DefForeignMod(..) |
4346 def::DefRegion(..) |
4348 def::DefSelfTy(..) => {
4349 fcx.ccx.tcx.sess.span_bug(sp, &format!("expected value, found {:?}", defn));
4354 // Instantiates the given path, which must refer to an item with the given
4355 // number of type parameters and type.
4356 pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4357 segments: &[hir::PathSegment],
4358 type_scheme: TypeScheme<'tcx>,
4359 type_predicates: &ty::GenericPredicates<'tcx>,
4360 opt_self_ty: Option<Ty<'tcx>>,
4363 node_id: ast::NodeId) {
4364 debug!("instantiate_path(path={:?}, def={:?}, node_id={}, type_scheme={:?})",
4370 // We need to extract the type parameters supplied by the user in
4371 // the path `path`. Due to the current setup, this is a bit of a
4372 // tricky-process; the problem is that resolve only tells us the
4373 // end-point of the path resolution, and not the intermediate steps.
4374 // Luckily, we can (at least for now) deduce the intermediate steps
4375 // just from the end-point.
4377 // There are basically four cases to consider:
4379 // 1. Reference to a *type*, such as a struct or enum:
4381 // mod a { struct Foo<T> { ... } }
4383 // Because we don't allow types to be declared within one
4384 // another, a path that leads to a type will always look like
4385 // `a::b::Foo<T>` where `a` and `b` are modules. This implies
4386 // that only the final segment can have type parameters, and
4387 // they are located in the TypeSpace.
4389 // *Note:* Generally speaking, references to types don't
4390 // actually pass through this function, but rather the
4391 // `ast_ty_to_ty` function in `astconv`. However, in the case
4392 // of struct patterns (and maybe literals) we do invoke
4393 // `instantiate_path` to get the general type of an instance of
4394 // a struct. (In these cases, there are actually no type
4395 // parameters permitted at present, but perhaps we will allow
4396 // them in the future.)
4398 // 1b. Reference to a enum variant or tuple-like struct:
4400 // struct foo<T>(...)
4401 // enum E<T> { foo(...) }
4403 // In these cases, the parameters are declared in the type
4406 // 2. Reference to a *fn item*:
4410 // In this case, the path will again always have the form
4411 // `a::b::foo::<T>` where only the final segment should have
4412 // type parameters. However, in this case, those parameters are
4413 // declared on a value, and hence are in the `FnSpace`.
4415 // 3. Reference to a *method*:
4417 // impl<A> SomeStruct<A> {
4421 // Here we can have a path like
4422 // `a::b::SomeStruct::<A>::foo::<B>`, in which case parameters
4423 // may appear in two places. The penultimate segment,
4424 // `SomeStruct::<A>`, contains parameters in TypeSpace, and the
4425 // final segment, `foo::<B>` contains parameters in fn space.
4427 // 4. Reference to an *associated const*:
4429 // impl<A> AnotherStruct<A> {
4430 // const FOO: B = BAR;
4433 // The path in this case will look like
4434 // `a::b::AnotherStruct::<A>::FOO`, so the penultimate segment
4435 // only will have parameters in TypeSpace.
4437 // The first step then is to categorize the segments appropriately.
4439 assert!(!segments.is_empty());
// `segment_spaces[i]` records which parameter space (if any) path
// segment `i` may legally supply parameters for; `None` means that
// segment must carry no parameters at all.
4441 let mut ufcs_associated = None;
4442 let mut segment_spaces: Vec<_>;
4444 // Case 1 and 1b. Reference to a *type* or *enum variant*.
4445 def::DefSelfTy(..) |
4446 def::DefStruct(..) |
4447 def::DefVariant(..) |
4449 def::DefAssociatedTy(..) |
4451 def::DefPrimTy(..) |
4452 def::DefTyParam(..) => {
4453 // Everything but the final segment should have no
4454 // parameters at all.
4455 segment_spaces = vec![None; segments.len() - 1];
4456 segment_spaces.push(Some(subst::TypeSpace));
4459 // Case 2. Reference to a top-level value.
4462 def::DefStatic(..) => {
4463 segment_spaces = vec![None; segments.len() - 1];
4464 segment_spaces.push(Some(subst::FnSpace));
4467 // Case 3. Reference to a method.
4468 def::DefMethod(def_id) => {
4469 let container = fcx.tcx().impl_or_trait_item(def_id).container();
4471 ty::TraitContainer(trait_did) => {
4472 callee::check_legal_trait_for_method_call(fcx.ccx, span, trait_did)
4474 ty::ImplContainer(_) => {}
4477 if segments.len() >= 2 {
4478 segment_spaces = vec![None; segments.len() - 2];
4479 segment_spaces.push(Some(subst::TypeSpace));
4480 segment_spaces.push(Some(subst::FnSpace));
4482 // `<T>::method` will end up here, and so can `T::method`.
4483 let self_ty = opt_self_ty.expect("UFCS sugared method missing Self");
4484 segment_spaces = vec![Some(subst::FnSpace)];
4485 ufcs_associated = Some((container, self_ty));
// Case 4. Reference to an associated const; like case 3 but the final
// segment can never carry fn-space parameters.
4489 def::DefAssociatedConst(def_id) => {
4490 let container = fcx.tcx().impl_or_trait_item(def_id).container();
4492 ty::TraitContainer(trait_did) => {
4493 callee::check_legal_trait_for_method_call(fcx.ccx, span, trait_did)
4495 ty::ImplContainer(_) => {}
4498 if segments.len() >= 2 {
4499 segment_spaces = vec![None; segments.len() - 2];
4500 segment_spaces.push(Some(subst::TypeSpace));
4501 segment_spaces.push(None);
4503 // `<T>::CONST` will end up here, and so can `T::CONST`.
4504 let self_ty = opt_self_ty.expect("UFCS sugared const missing Self");
4505 segment_spaces = vec![None];
4506 ufcs_associated = Some((container, self_ty));
4510 // Other cases. Various nonsense that really shouldn't show up
4511 // here. If they do, an error will have been reported
4512 // elsewhere. (I hope)
4514 def::DefForeignMod(..) |
4517 def::DefRegion(..) |
4519 def::DefUpvar(..) => {
4520 segment_spaces = vec![None; segments.len()];
4523 assert_eq!(segment_spaces.len(), segments.len());
4525 // In `<T as Trait<A, B>>::method`, `A` and `B` are mandatory, but
4526 // `opt_self_ty` can also be Some for `Foo::method`, where Foo's
4527 // type parameters are not mandatory.
4528 let require_type_space = opt_self_ty.is_some() && ufcs_associated.is_none();
4530 debug!("segment_spaces={:?}", segment_spaces);
4532 // Next, examine the definition, and determine how many type
4533 // parameters we expect from each space.
4534 let type_defs = &type_scheme.generics.types;
4535 let region_defs = &type_scheme.generics.regions;
4537 // Now that we have categorized what space the parameters for each
4538 // segment belong to, let's sort out the parameters that the user
4539 // provided (if any) into their appropriate spaces. We'll also report
4540 // errors if type parameters are provided in an inappropriate place.
4541 let mut substs = Substs::empty();
4542 for (opt_space, segment) in segment_spaces.iter().zip(segments) {
// A `None` space means this segment must not carry parameters;
// `prohibit_type_params` errors if any were written.
4545 prohibit_type_params(fcx.tcx(), slice::ref_slice(segment));
4549 push_explicit_parameters_from_segment_to_substs(fcx,
// If the item has a `Self` type parameter, supply the self type the
// caller resolved for it.
4559 if let Some(self_ty) = opt_self_ty {
4560 if type_defs.len(subst::SelfSpace) == 1 {
4561 substs.types.push(subst::SelfSpace, self_ty);
4565 // Now we have to compare the types that the user *actually*
4566 // provided against the types that were *expected*. If the user
4567 // did not provide any types, then we want to substitute inference
4568 // variables. If the user provided some types, we may still need
4569 // to add defaults. If the user provided *too many* types, that's
4571 for &space in &[subst::SelfSpace, subst::TypeSpace, subst::FnSpace] {
4572 adjust_type_parameters(fcx, span, space, type_defs,
4573 require_type_space, &mut substs);
4574 assert_eq!(substs.types.len(space), type_defs.len(space));
4576 adjust_region_parameters(fcx, span, space, region_defs, &mut substs);
4577 assert_eq!(substs.regions().len(space), region_defs.len(space));
4580 // The things we are substituting into the type should not contain
4581 // escaping late-bound regions, and nor should the base type scheme.
4582 assert!(!substs.has_regions_escaping_depth(0));
4583 assert!(!type_scheme.has_escaping_regions());
4585 // Add all the obligations that are required, substituting and
4586 // normalized appropriately.
4587 let bounds = fcx.instantiate_bounds(span, &substs, &type_predicates);
4588 fcx.add_obligations_for_parameters(
4589 traits::ObligationCause::new(span, fcx.body_id, traits::ItemObligation(def.def_id())),
4592 // Substitute the values for the type parameters into the type of
4593 // the referenced item.
4594 let ty_substituted = fcx.instantiate_type_scheme(span, &substs, &type_scheme.ty);
4597 if let Some((ty::ImplContainer(impl_def_id), self_ty)) = ufcs_associated {
4598 // In the case of `Foo<T>::method` and `<Foo<T>>::method`, if `method`
4599 // is inherent, there is no `Self` parameter, instead, the impl needs
4600 // type parameters, which we can infer by unifying the provided `Self`
4601 // with the substituted impl type.
4602 let impl_scheme = fcx.tcx().lookup_item_type(impl_def_id);
4603 assert_eq!(substs.types.len(subst::TypeSpace),
4604 impl_scheme.generics.types.len(subst::TypeSpace));
4605 assert_eq!(substs.regions().len(subst::TypeSpace),
4606 impl_scheme.generics.regions.len(subst::TypeSpace));
// The provided self type must unify with the impl's substituted self
// type; failure here is a compiler invariant violation, not a user
// error (UFCS resolution already proved the subtype relation).
4608 let impl_ty = fcx.instantiate_type_scheme(span, &substs, &impl_scheme.ty);
4609 if fcx.mk_subty(false, infer::Misc(span), self_ty, impl_ty).is_err() {
4610 fcx.tcx().sess.span_bug(span,
4612 "instantiate_path: (UFCS) {:?} was a subtype of {:?} but now is not?",
4618 debug!("instantiate_path: type of {:?} is {:?}",
// Record the final instantiated type and substitutions for this node.
4621 fcx.write_ty(node_id, ty_substituted);
4622 fcx.write_substs(node_id, ty::ItemSubsts { substs: substs });
4625 /// Finds the parameters that the user provided and adds them to `substs`. If too many
4626 /// parameters are provided, then reports an error and clears the output vector.
4628 /// We clear the output vector because that will cause the `adjust_XXX_parameters()` later to
4629 /// use inference variables. This seems less likely to lead to derived errors.
4631 /// Note that we *do not* check for *too few* parameters here. Due to the presence of defaults
4632 /// etc that is more complicated. I wanted however to do the reporting of *too many* parameters
4633 /// here because we can easily use the precise span of the N+1'th parameter.
4634 fn push_explicit_parameters_from_segment_to_substs<'a, 'tcx>(
4635 fcx: &FnCtxt<'a, 'tcx>,
4636 space: subst::ParamSpace,
// NOTE(review): a `span: Span` parameter is used in the match arms below but is not
// visible in this excerpt's signature — presumably it sits on an elided line; confirm.
4638 type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4639 region_defs: &VecPerParamSpace<ty::RegionParameterDef>,
4640 segment: &hir::PathSegment,
4641 substs: &mut Substs<'tcx>)
// Dispatch on the syntactic form of the segment's parameters.
4643 match segment.parameters {
// `Foo<'a, A, B>` form: delegate to the angle-bracket handler, which pushes
// the explicit lifetimes and types into `substs` for `space`.
4644 hir::AngleBracketedParameters(ref data) => {
4645 push_explicit_angle_bracketed_parameters_from_segment_to_substs(
4646 fcx, space, type_defs, region_defs, data, substs);
// `Foo(A, B) -> C` sugar: only permitted on traits, so E0238 is reported here,
// but the parameters are still pushed so checking can proceed past the error.
4649 hir::ParenthesizedParameters(ref data) => {
4650 span_err!(fcx.tcx().sess, span, E0238,
4651 "parenthesized parameters may only be used with a trait");
4652 push_explicit_parenthesized_parameters_from_segment_to_substs(
4653 fcx, space, span, type_defs, data, substs);
/// Pushes the explicitly-written `<'a, ..., T, ...>` parameters of one path
/// segment into `substs` for the given `space`. Too many types (E0087) or
/// lifetimes (E0088) produce one error at the first surplus argument and the
/// corresponding vector is truncated to empty, so the later `adjust_*` passes
/// fall back to inference variables. Associated-type bindings are rejected
/// outright (E0182) since they are not allowed in expression paths.
4658 fn push_explicit_angle_bracketed_parameters_from_segment_to_substs<'a, 'tcx>(
4659 fcx: &FnCtxt<'a, 'tcx>,
4660 space: subst::ParamSpace,
4661 type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4662 region_defs: &VecPerParamSpace<ty::RegionParameterDef>,
4663 data: &hir::AngleBracketedParameterData,
4664 substs: &mut Substs<'tcx>)
// --- Explicit type arguments ---
4667 let type_count = type_defs.len(space);
// The caller is expected to hand us an empty type vector for this space.
4668 assert_eq!(substs.types.len(space), 0);
4669 for (i, typ) in data.types.iter().enumerate() {
// Convert each written type even when it is surplus, so it still gets checked.
4670 let t = fcx.to_ty(&**typ);
// NOTE(review): the `if i < type_count {` guard appears to be on an elided line here.
4672 substs.types.push(space, t);
// Report the error exactly once, at the first type beyond the declared count.
4673 } else if i == type_count {
4674 span_err!(fcx.tcx().sess, typ.span, E0087,
4675 "too many type parameters provided: \
4676 expected at most {} parameter{}, \
4677 found {} parameter{}",
4679 if type_count == 1 {""} else {"s"},
4681 if data.types.len() == 1 {""} else {"s"});
// Clear what we pushed so adjust_type_parameters() substitutes inference vars.
4682 substs.types.truncate(space, 0);
// --- Associated-item bindings: never legal in an expression path ---
4688 if !data.bindings.is_empty() {
4689 span_err!(fcx.tcx().sess, data.bindings[0].span, E0182,
4690 "unexpected binding of associated item in expression path \
4691 (only allowed in type paths)");
// --- Explicit lifetime arguments (mirrors the type loop above) ---
4695 let region_count = region_defs.len(space);
4696 assert_eq!(substs.regions().len(space), 0);
4697 for (i, lifetime) in data.lifetimes.iter().enumerate() {
4698 let r = ast_region_to_region(fcx.tcx(), lifetime);
4699 if i < region_count {
4700 substs.mut_regions().push(space, r);
// One E0088 at the first surplus lifetime, then truncate → inference vars later.
4701 } else if i == region_count {
4702 span_err!(fcx.tcx().sess, lifetime.span, E0088,
4703 "too many lifetime parameters provided: \
4704 expected {} parameter{}, found {} parameter{}",
4706 if region_count == 1 {""} else {"s"},
4707 data.lifetimes.len(),
4708 if data.lifetimes.len() == 1 {""} else {"s"});
4709 substs.mut_regions().truncate(space, 0);
/// As
4717 /// `push_explicit_angle_bracketed_parameters_from_segment_to_substs`,
4718 /// but intended for `Foo(A,B) -> C` form. This expands to
4719 /// roughly the same thing as `Foo<(A,B),C>`. One important
4720 /// difference has to do with the treatment of anonymous
4721 /// regions, which are translated into bound regions (NYI).
4722 fn push_explicit_parenthesized_parameters_from_segment_to_substs<'a, 'tcx>(
4723 fcx: &FnCtxt<'a, 'tcx>,
4724 space: subst::ParamSpace,
// NOTE(review): a `span: Span` parameter is used below but sits on an elided line.
4726 type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4727 data: &hir::ParenthesizedParameterData,
4728 substs: &mut Substs<'tcx>)
4730 let type_count = type_defs.len(space);
// The sugar always supplies exactly two type parameters (inputs tuple + output);
// E0167 when the definition expects a different count. The guard condition for
// this error is on an elided line — presumably `if type_count != 2`.
4732 span_err!(fcx.tcx().sess, span, E0167,
4733 "parenthesized form always supplies 2 type parameters, \
4734 but only {} parameter(s) were expected",
// First synthetic parameter: the inputs `(A, B)` collected into a tuple type.
4738 let input_tys: Vec<Ty> =
4739 data.inputs.iter().map(|ty| fcx.to_ty(&**ty)).collect();
4741 let tuple_ty = fcx.tcx().mk_tup(input_tys);
// Push guardedly so we stay within the declared count even after E0167.
4743 if type_count >= 1 {
4744 substs.types.push(space, tuple_ty);
// Second synthetic parameter: the output `C`, defaulting to `()` when the
// `-> C` part was omitted.
4747 let output_ty: Option<Ty> =
4748 data.output.as_ref().map(|ty| fcx.to_ty(&**ty));
4751 output_ty.unwrap_or(fcx.tcx().mk_nil());
4753 if type_count >= 2 {
4754 substs.types.push(space, output_ty);
/// Reconciles the explicitly-provided type parameters in `substs` for `space`
/// against the declared parameter definitions `defs`, so that afterwards
/// `substs.types.len(space) == defs.len(space)` exactly. Three cases:
/// none provided → fresh inference variables; fewer than the number of
/// non-defaulted parameters → E0089 and error types; otherwise → fill the
/// tail with the declared defaults, substituted against the partial substs.
4758 fn adjust_type_parameters<'a, 'tcx>(
4759 fcx: &FnCtxt<'a, 'tcx>,
// NOTE(review): `span` and `space` parameters are used below but sit on elided lines.
4762 defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4763 require_type_space: bool,
4764 substs: &mut Substs<'tcx>)
4766 let provided_len = substs.types.len(space);
4767 let desired = defs.get_slice(space);
// `required_len` counts the leading run of parameters with no default; only
// those must be written explicitly (defaults must trail, so take_while works).
4768 let required_len = desired.iter()
4769 .take_while(|d| d.default.is_none())
4772 debug!("adjust_type_parameters(space={:?}, \
4781 // Enforced by `push_explicit_parameters_from_segment_to_substs()`.
4782 assert!(provided_len <= desired.len());
4784 // Nothing specified at all: supply inference variables for
// all parameters — unless the caller demanded explicit types in TypeSpace.
4786 if provided_len == 0 && !(require_type_space && space == subst::TypeSpace) {
4787 substs.types.replace(space, Vec::new());
4788 fcx.infcx().type_vars_for_defs(span, space, substs, &desired[..]);
4792 // Too few parameters specified: report an error and use Err
// types for every slot, so downstream checking does not cascade errors.
4794 if provided_len < required_len {
// "at least" only when some parameters are defaulted (required < desired).
4796 if desired.len() != required_len { "at least " } else { "" };
4797 span_err!(fcx.tcx().sess, span, E0089,
4798 "too few type parameters provided: expected {}{} parameter{}, \
4799 found {} parameter{}",
4800 qualifier, required_len,
4801 if required_len == 1 {""} else {"s"},
4803 if provided_len == 1 {""} else {"s"});
4804 substs.types.replace(space, vec![fcx.tcx().types.err; desired.len()]);
4808 // Otherwise, add in any optional parameters that the user
4809 // omitted. The case of *too many* parameters is handled
4811 // push_explicit_parameters_from_segment_to_substs(). Note
4812 // that the *default* type are expressed in terms of all prior
4813 // parameters, so we have to substitute as we go with the
4814 // partial substitution that we have built up.
4815 for i in provided_len..desired.len() {
// `unwrap` is safe: everything past `required_len` has a default by construction.
4816 let default = desired[i].default.unwrap();
4817 let default = default.subst_spanned(fcx.tcx(), substs, Some(span));
4818 substs.types.push(space, default);
// Postcondition: the space is now exactly filled.
4820 assert_eq!(substs.types.len(space), desired.len());
4822 debug!("Final substs: {:?}", substs);
/// Region analogue of `adjust_type_parameters`: makes the region substitutions
/// for `space` match the declared count in `defs`. Zero provided → fresh
/// inference variables; exact count → unchanged; otherwise (too few, since
/// too many was already truncated to zero upstream) → E0090 and replace with
/// fresh inference variables. Lifetimes have no defaults, hence no third phase.
4825 fn adjust_region_parameters(
// NOTE(review): `fcx`, `span`, and `space` parameters are used below but sit on
// elided lines of the signature.
4829 defs: &VecPerParamSpace<ty::RegionParameterDef>,
4830 substs: &mut Substs)
4832 let provided_len = substs.mut_regions().len(space);
4833 let desired = defs.get_slice(space);
4835 // Enforced by `push_explicit_parameters_from_segment_to_substs()`.
4836 assert!(provided_len <= desired.len());
4838 // If nothing was provided, just use inference variables.
4839 if provided_len == 0 {
4840 substs.mut_regions().replace(
4842 fcx.infcx().region_vars_for_defs(span, desired));
4846 // If just the right number were provided, everybody is happy.
4847 if provided_len == desired.len() {
4851 // Otherwise, too few were provided. Report an error and then
4852 // use inference variables.
4853 span_err!(fcx.tcx().sess, span, E0090,
4854 "too few lifetime parameters provided: expected {} parameter{}, \
4855 found {} parameter{}",
4857 if desired.len() == 1 {""} else {"s"},
4859 if provided_len == 1 {""} else {"s"});
// Discard the partial list entirely rather than mixing written lifetimes
// with inferred ones.
4861 substs.mut_regions().replace(
4863 fcx.infcx().region_vars_for_defs(span, desired));
/// Resolves `ty` as far as current inference allows; if it is still an
/// unresolved type variable, consults the fallback closure `f` for an
/// alternative. If the alternative is itself unresolved or erroneous, the
/// "type of this value must be known in this context" error is reported and
/// the error type is propagated; otherwise the alternative is unified with
/// `ty` via `demand::suptype`.
4867 fn structurally_resolve_type_or_else<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
// NOTE(review): the remaining parameters (`sp: Span`, `ty: Ty<'tcx>`, `f: F`)
// and the return type sit on elided lines of this signature.
4871 where F: Fn() -> Ty<'tcx>
4873 let mut ty = fcx.resolve_type_vars_if_possible(ty);
// Presumably guarded by `if ty.is_ty_var()` on an elided line — only then is
// the fallback consulted. TODO confirm against the full file.
4876 let alternative = f();
// The fallback did not help either: report and poison with the error type.
4879 if alternative.is_ty_var() || alternative.references_error() {
4880 fcx.type_error_message(sp, |_actual| {
4881 "the type of this value must be known in this context".to_string()
// Record err as a supertype so the variable is constrained and the error
// does not cascade into unrelated diagnostics.
4883 demand::suptype(fcx, sp, fcx.tcx().types.err, ty);
4884 ty = fcx.tcx().types.err;
// Fallback produced a concrete type: unify it with the original variable.
4886 demand::suptype(fcx, sp, alternative, ty);
4894 // Resolves `typ` by a single level if `typ` is a type variable. If no
4895 // resolution is possible, then an error is reported.
4896 pub fn structurally_resolved_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
// Thin wrapper over `structurally_resolve_type_or_else`; the fallback closure
// (body elided here) presumably reports the error case rather than recovering.
4901 structurally_resolve_type_or_else(fcx, sp, ty, || {
4906 // Returns true if b contains a break that can exit from b
4907 pub fn may_break(cx: &ty::ctxt, id: ast::NodeId, b: &hir::Block) -> bool {
4908 // First: is there an unlabeled break immediately
// inside the loop body? (`loop_query` scans without descending into nested loops.)
4910 (loop_query(&*b, |e| {
4912 hir::ExprBreak(None) => true,
// The two queries are presumably combined with `||` on an elided line.
4916 // Second: is there a labeled break with label
4917 // <id> nested anywhere inside the loop?
4918 (block_query(b, |e| {
// Only counts labeled breaks whose resolved label is this loop's `id`.
4919 if let hir::ExprBreak(Some(_)) = e.node {
4920 lookup_full_def(cx, e.span, e.id) == def::DefLabel(id)
4927 pub fn check_bounds_are_used<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
4929 tps: &OwnedSlice<hir::TyParam>,
4931 debug!("check_bounds_are_used(n_tps={}, ty={:?})",
4934 // make a vector of booleans initially false, set to true when used
4935 if tps.is_empty() { return; }
4936 let mut tps_used = vec![false; tps.len()];
4938 for leaf_ty in ty.walk() {
4939 if let ty::TyParam(ParamTy {idx, ..}) = leaf_ty.sty {
4940 debug!("Found use of ty param num {}", idx);
4941 tps_used[idx as usize] = true;
4945 for (i, b) in tps_used.iter().enumerate() {
4947 span_err!(ccx.tcx.sess, span, E0091,
4948 "type parameter `{}` is unused",