1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
15 Within the check phase of type check, we check each item one at a time
16 (bodies of function expressions are checked as part of the containing
17 function). Inference is used to supply types wherever they are
20 By far the most complex case is checking the body of a function. This
21 can be broken down into several distinct phases:
23 - gather: creates type variables to represent the type of each local
24 variable and pattern binding.
26 - main: the main pass does the lion's share of the work: it
27 determines the types of all expressions, resolves
28 methods, checks for most invalid conditions, and so forth. In
29 some cases, where a type is unknown, it may create a type or region
30 variable and use that as the type of an expression.
32 In the process of checking, various constraints will be placed on
33 these type variables through the subtyping relationships requested
34 through the `demand` module. The `infer` module is in charge
35 of resolving those constraints.
37 - regionck: after main is complete, the regionck pass goes over all
38 types looking for regions and making sure that they did not escape
39 into places they are not in scope. This may also influence the
40 final assignments of the various region variables if there is some
43 - vtable: finds and records the impls to use for each trait bound that
44 appears on a type parameter.
46 - writeback: writes the final types within a function body, replacing
47 type variables with their final inferred types. These final types
48 are written into the `tcx.node_types` table, which should *never* contain
49 any reference to a type variable.
53 While type checking a function, the intermediate types for the
54 expressions, blocks, and so forth contained within the function are
55 stored in `fcx.node_types` and `fcx.item_substs`. These types
56 may contain unresolved type variables. After type checking is
57 complete, the functions in the writeback module are used to take the
58 types from this table, resolve them, and then write them into their
59 permanent home in the type context `ccx.tcx`.
61 This means that during inferencing you should use `fcx.write_ty()`
62 and `fcx.expr_ty()` / `fcx.node_ty()` to write/obtain the types of
63 nodes within the function.
65 The types of top-level items, which never contain unbound type
66 variables, are stored directly into the `tcx` tables.
68 n.b.: A type variable is not the same thing as a type parameter. A
69 type variable is rather an "instance" of a type parameter: that is,
70 given a generic function `fn foo<T>(t: T)`: while checking the
71 function `foo`, the type `ty_param(0)` refers to the type `T`, which
72 is treated in abstract. When `foo()` is called, however, `T` will be
73 substituted for a fresh type variable `N`. This variable will
74 eventually be resolved to some concrete type (which might itself be
79 pub use self::Expectation::*;
80 pub use self::compare_method::{compare_impl_method, compare_const_impl};
81 use self::TupleArgumentsFlag::*;
83 use astconv::{self, ast_region_to_region, ast_ty_to_ty, AstConv, PathParamMode};
84 use check::_match::pat_ctxt;
85 use dep_graph::DepNode;
86 use fmt_macros::{Parser, Piece, Position};
87 use middle::astconv_util::prohibit_type_params;
88 use middle::cstore::LOCAL_CRATE;
90 use middle::def_id::DefId;
92 use middle::infer::{TypeOrigin, type_variable};
93 use middle::pat_util::{self, pat_id_map};
94 use middle::privacy::{AllPublic, LastMod};
95 use middle::subst::{self, Subst, Substs, VecPerParamSpace, ParamSpace, TypeSpace};
96 use middle::traits::{self, report_fulfillment_errors};
97 use middle::ty::{GenericPredicates, TypeScheme};
98 use middle::ty::{Disr, ParamTy, ParameterEnvironment};
99 use middle::ty::{LvaluePreference, NoPreference, PreferMutLvalue};
100 use middle::ty::{self, ToPolyTraitRef, Ty};
101 use middle::ty::{MethodCall, MethodCallee};
102 use middle::ty::adjustment;
103 use middle::ty::error::TypeError;
104 use middle::ty::fold::{TypeFolder, TypeFoldable};
105 use middle::ty::util::Representability;
106 use require_c_abi_if_variadic;
107 use rscope::{ElisionFailureInfo, RegionScope};
108 use session::Session;
109 use {CrateCtxt, lookup_full_def};
112 use util::common::{block_query, ErrorReported, indenter, loop_query};
113 use util::nodemap::{DefIdMap, FnvHashMap, NodeMap};
115 use std::cell::{Cell, Ref, RefCell};
116 use std::collections::{HashSet};
117 use std::mem::replace;
121 use syntax::attr::AttrMetaMethods;
122 use syntax::codemap::{self, Span, Spanned};
123 use syntax::errors::DiagnosticBuilder;
124 use syntax::parse::token::{self, InternedString};
126 use syntax::util::lev_distance::find_best_match_for_name;
128 use rustc_front::intravisit::{self, Visitor};
129 use rustc_front::hir;
130 use rustc_front::hir::Visibility;
131 use rustc_front::print::pprust;
132 use rustc_back::slice;
151 /// closures defined within the function. For example:
154 /// bar(move|| { ... })
157 /// Here, the function `foo()` and the closure passed to
158 /// `bar()` will each have their own `FnCtxt`, but they will
159 /// share the inherited fields.
160 pub struct Inherited<'a, 'tcx: 'a> {
161 infcx: infer::InferCtxt<'a, 'tcx>,
162 locals: RefCell<NodeMap<Ty<'tcx>>>,
164 tables: &'a RefCell<ty::Tables<'tcx>>,
166 // When we process a call like `c()` where `c` is a closure type,
167 // we may not have decided yet whether `c` is a `Fn`, `FnMut`, or
168 // `FnOnce` closure. In that case, we defer full resolution of the
169 // call until upvar inference can kick in and make the
170 // decision. We keep these deferred resolutions grouped by the
171 // def-id of the closure, so that once we decide, we can easily go
172 // back and process them.
173 deferred_call_resolutions: RefCell<DefIdMap<Vec<DeferredCallResolutionHandler<'tcx>>>>,
175 deferred_cast_checks: RefCell<Vec<cast::CastCheck<'tcx>>>,
178 trait DeferredCallResolution<'tcx> {
179 fn resolve<'a>(&mut self, fcx: &FnCtxt<'a,'tcx>);
// Owned, boxed deferred-call callback (see the comment on
// `deferred_call_resolutions` above); the `+'tcx` bound lets the box
// capture data tied to the type context's lifetime.
182 type DeferredCallResolutionHandler<'tcx> = Box<DeferredCallResolution<'tcx>+'tcx>;
184 /// When type-checking an expression, we propagate downward
185 /// whatever type hint we are able in the form of an `Expectation`.
186 #[derive(Copy, Clone, Debug)]
187 pub enum Expectation<'tcx> {
188 /// We know nothing about what type this expression should have.
191 /// This expression should have the type given (or some subtype)
192 ExpectHasType(Ty<'tcx>),
194 /// This expression will be cast to the `Ty`
195 ExpectCastableToType(Ty<'tcx>),
197 /// This rvalue expression will be wrapped in `&` or `Box` and coerced
198 /// to `&Ty` or `Box<Ty>`, respectively. `Ty` is `[A]` or `Trait`.
199 ExpectRvalueLikeUnsized(Ty<'tcx>),
202 impl<'tcx> Expectation<'tcx> {
203 // Disregard "castable to" expectations because they
204 // can lead us astray. Consider for example `if cond
205 // {22} else {c} as u8` -- if we propagate the
206 // "castable to u8" constraint to 22, it will pick the
207 // type 22u8, which is overly constrained (c might not
208 // be a u8). In effect, the problem is that the
209 // "castable to" expectation is not the tightest thing
210 // we can say, so we want to drop it in this case.
211 // The tightest thing we can say is "must unify with
212 // else branch". Note that in the case of a "has type"
213 // constraint, this limitation does not hold.
215 // If the expected type is just a type variable, then don't use
216 // an expected type. Otherwise, we might write parts of the type
217 // when checking the 'then' block which are incompatible with the
219 fn adjust_for_branches<'a>(&self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> {
221 ExpectHasType(ety) => {
222 let ety = fcx.infcx().shallow_resolve(ety);
223 if !ety.is_ty_var() {
229 ExpectRvalueLikeUnsized(ety) => {
230 ExpectRvalueLikeUnsized(ety)
237 #[derive(Copy, Clone)]
238 pub struct UnsafetyState {
239 pub def: ast::NodeId,
240 pub unsafety: hir::Unsafety,
241 pub unsafe_push_count: u32,
246 pub fn function(unsafety: hir::Unsafety, def: ast::NodeId) -> UnsafetyState {
247 UnsafetyState { def: def, unsafety: unsafety, unsafe_push_count: 0, from_fn: true }
250 pub fn recurse(&mut self, blk: &hir::Block) -> UnsafetyState {
251 match self.unsafety {
252 // If this unsafe, then if the outer function was already marked as
253 // unsafe we shouldn't attribute the unsafe'ness to the block. This
254 // way the block can be warned about instead of ignoring this
255 // extraneous block (functions are never warned about).
256 hir::Unsafety::Unsafe if self.from_fn => *self,
259 let (unsafety, def, count) = match blk.rules {
260 hir::PushUnsafeBlock(..) =>
261 (unsafety, blk.id, self.unsafe_push_count.checked_add(1).unwrap()),
262 hir::PopUnsafeBlock(..) =>
263 (unsafety, blk.id, self.unsafe_push_count.checked_sub(1).unwrap()),
264 hir::UnsafeBlock(..) =>
265 (hir::Unsafety::Unsafe, blk.id, self.unsafe_push_count),
266 hir::DefaultBlock | hir::PushUnstableBlock | hir:: PopUnstableBlock =>
267 (unsafety, self.def, self.unsafe_push_count),
269 UnsafetyState{ def: def,
271 unsafe_push_count: count,
279 pub struct FnCtxt<'a, 'tcx: 'a> {
280 body_id: ast::NodeId,
282 // This flag is set to true if, during the writeback phase, we encounter
283 // a type error in this function.
284 writeback_errors: Cell<bool>,
286 // Number of errors that had been reported when we started
287 // checking this function. On exit, if we find that *more* errors
288 // have been reported, we will skip regionck and other work that
289 // expects the types within the function to be consistent.
290 err_count_on_creation: usize,
292 ret_ty: ty::FnOutput<'tcx>,
294 ps: RefCell<UnsafetyState>,
296 inh: &'a Inherited<'a, 'tcx>,
298 ccx: &'a CrateCtxt<'a, 'tcx>,
301 impl<'a, 'tcx> Inherited<'a, 'tcx> {
302 fn new(tcx: &'a ty::ctxt<'tcx>,
303 tables: &'a RefCell<ty::Tables<'tcx>>,
304 param_env: ty::ParameterEnvironment<'a, 'tcx>)
305 -> Inherited<'a, 'tcx> {
308 infcx: infer::new_infer_ctxt(tcx, tables, Some(param_env)),
309 locals: RefCell::new(NodeMap()),
311 deferred_call_resolutions: RefCell::new(DefIdMap()),
312 deferred_cast_checks: RefCell::new(Vec::new()),
316 fn normalize_associated_types_in<T>(&self,
318 body_id: ast::NodeId,
321 where T : TypeFoldable<'tcx>
323 let mut fulfillment_cx = self.infcx.fulfillment_cx.borrow_mut();
324 assoc::normalize_associated_types_in(&self.infcx,
333 // Used by check_const and check_enum_variants
334 pub fn blank_fn_ctxt<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
335 inh: &'a Inherited<'a, 'tcx>,
336 rty: ty::FnOutput<'tcx>,
337 body_id: ast::NodeId)
338 -> FnCtxt<'a, 'tcx> {
341 writeback_errors: Cell::new(false),
342 err_count_on_creation: ccx.tcx.sess.err_count(),
344 ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal, 0)),
350 fn static_inherited_fields<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
351 tables: &'a RefCell<ty::Tables<'tcx>>)
352 -> Inherited<'a, 'tcx> {
353 // It's kind of a kludge to manufacture a fake function context
354 // and statement context, but we might as well write the code only once
355 let param_env = ccx.tcx.empty_parameter_environment();
356 Inherited::new(ccx.tcx, &tables, param_env)
// Thin visitors driving the two typeck passes over crate items:
// `CheckItemTypesVisitor` calls `check_item_type` per item (signatures and
// type-level checks), `CheckItemBodiesVisitor` calls `check_item_body`
// (full function/method body checking). See `check_item_types` /
// `check_item_bodies` below for the driver loops.
359 struct CheckItemTypesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
360 struct CheckItemBodiesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
362 impl<'a, 'tcx> Visitor<'tcx> for CheckItemTypesVisitor<'a, 'tcx> {
363 fn visit_item(&mut self, i: &'tcx hir::Item) {
364 check_item_type(self.ccx, i);
365 intravisit::walk_item(self, i);
368 fn visit_ty(&mut self, t: &'tcx hir::Ty) {
370 hir::TyFixedLengthVec(_, ref expr) => {
371 check_const_in_type(self.ccx, &**expr, self.ccx.tcx.types.usize);
376 intravisit::walk_ty(self, t);
380 impl<'a, 'tcx> Visitor<'tcx> for CheckItemBodiesVisitor<'a, 'tcx> {
381 fn visit_item(&mut self, i: &'tcx hir::Item) {
382 check_item_body(self.ccx, i);
386 pub fn check_wf_new(ccx: &CrateCtxt) {
387 ccx.tcx.sess.abort_if_new_errors(|| {
388 let mut visit = wfcheck::CheckTypeWellFormedVisitor::new(ccx);
389 ccx.tcx.visit_all_items_in_krate(DepNode::WfCheck, &mut visit);
393 pub fn check_item_types(ccx: &CrateCtxt) {
394 ccx.tcx.sess.abort_if_new_errors(|| {
395 let mut visit = CheckItemTypesVisitor { ccx: ccx };
396 ccx.tcx.visit_all_items_in_krate(DepNode::TypeckItemType, &mut visit);
400 pub fn check_item_bodies(ccx: &CrateCtxt) {
401 ccx.tcx.sess.abort_if_new_errors(|| {
402 let mut visit = CheckItemBodiesVisitor { ccx: ccx };
403 ccx.tcx.visit_all_items_in_krate(DepNode::TypeckItemBody, &mut visit);
407 pub fn check_drop_impls(ccx: &CrateCtxt) {
408 ccx.tcx.sess.abort_if_new_errors(|| {
409 let _task = ccx.tcx.dep_graph.in_task(DepNode::Dropck);
410 let drop_trait = match ccx.tcx.lang_items.drop_trait() {
411 Some(id) => ccx.tcx.lookup_trait_def(id), None => { return }
413 drop_trait.for_each_impl(ccx.tcx, |drop_impl_did| {
414 let _task = ccx.tcx.dep_graph.in_task(DepNode::DropckImpl(drop_impl_did));
415 if drop_impl_did.is_local() {
416 match dropck::check_drop_impl(ccx.tcx, drop_impl_did) {
419 assert!(ccx.tcx.sess.has_errors());
427 fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
428 decl: &'tcx hir::FnDecl,
429 body: &'tcx hir::Block,
433 param_env: ty::ParameterEnvironment<'a, 'tcx>)
436 ty::TyBareFn(_, ref fn_ty) => {
437 let tables = RefCell::new(ty::Tables::empty());
438 let inh = Inherited::new(ccx.tcx, &tables, param_env);
440 // Compute the fty from point of view of inside fn.
441 let fn_scope = ccx.tcx.region_maps.call_site_extent(fn_id, body.id);
443 fn_ty.sig.subst(ccx.tcx, &inh.infcx.parameter_environment.free_substs);
445 ccx.tcx.liberate_late_bound_regions(fn_scope, &fn_sig);
447 inh.normalize_associated_types_in(body.span,
451 let fcx = check_fn(ccx, fn_ty.unsafety, fn_id, &fn_sig,
452 decl, fn_id, body, &inh);
454 fcx.select_all_obligations_and_apply_defaults();
455 upvar::closure_analyze_fn(&fcx, fn_id, decl, body);
456 fcx.select_obligations_where_possible();
458 fcx.select_all_obligations_or_error(); // Casts can introduce new obligations.
460 regionck::regionck_fn(&fcx, fn_id, fn_span, decl, body);
461 writeback::resolve_type_vars_in_fn(&fcx, decl, body);
463 _ => ccx.tcx.sess.impossible_case(body.span,
464 "check_bare_fn: function type expected")
468 struct GatherLocalsVisitor<'a, 'tcx: 'a> {
469 fcx: &'a FnCtxt<'a, 'tcx>
472 impl<'a, 'tcx> GatherLocalsVisitor<'a, 'tcx> {
473 fn assign(&mut self, _span: Span, nid: ast::NodeId, ty_opt: Option<Ty<'tcx>>) -> Ty<'tcx> {
476 // infer the variable's type
477 let var_ty = self.fcx.infcx().next_ty_var();
478 self.fcx.inh.locals.borrow_mut().insert(nid, var_ty);
482 // take type that the user specified
483 self.fcx.inh.locals.borrow_mut().insert(nid, typ);
490 impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> {
491 // Add explicitly-declared locals.
492 fn visit_local(&mut self, local: &'tcx hir::Local) {
493 let o_ty = match local.ty {
494 Some(ref ty) => Some(self.fcx.to_ty(&**ty)),
497 self.assign(local.span, local.id, o_ty);
498 debug!("Local variable {:?} is assigned type {}",
500 self.fcx.infcx().ty_to_string(
501 self.fcx.inh.locals.borrow().get(&local.id).unwrap().clone()));
502 intravisit::walk_local(self, local);
505 // Add pattern bindings.
506 fn visit_pat(&mut self, p: &'tcx hir::Pat) {
507 if let hir::PatIdent(_, ref path1, _) = p.node {
508 if pat_util::pat_is_binding(&self.fcx.ccx.tcx.def_map.borrow(), p) {
509 let var_ty = self.assign(p.span, p.id, None);
511 self.fcx.require_type_is_sized(var_ty, p.span,
512 traits::VariableType(p.id));
514 debug!("Pattern binding {} is assigned to {} with type {:?}",
516 self.fcx.infcx().ty_to_string(
517 self.fcx.inh.locals.borrow().get(&p.id).unwrap().clone()),
521 intravisit::walk_pat(self, p);
524 fn visit_block(&mut self, b: &'tcx hir::Block) {
525 // non-obvious: the `blk` variable maps to region lb, so
526 // we have to keep this up-to-date. This
527 // is... unfortunate. It'd be nice to not need this.
528 intravisit::walk_block(self, b);
531 // Since an expr occurs as part of the type fixed size arrays we
532 // need to record the type for that node
533 fn visit_ty(&mut self, t: &'tcx hir::Ty) {
535 hir::TyFixedLengthVec(ref ty, ref count_expr) => {
536 self.visit_ty(&**ty);
537 check_expr_with_hint(self.fcx, &**count_expr, self.fcx.tcx().types.usize);
539 hir::TyBareFn(ref function_declaration) => {
540 intravisit::walk_fn_decl_nopat(self, &function_declaration.decl);
541 walk_list!(self, visit_lifetime_def, &function_declaration.lifetimes);
543 _ => intravisit::walk_ty(self, t)
// NOTE(review): nested closures appear to get their own `FnCtxt` (see the
// `Inherited` docs near the top of this file), so their locals are
// presumably gathered when each closure is itself type-checked — hence the
// deliberately empty body here.
547 // Don't descend into the bodies of nested closures
548 fn visit_fn(&mut self, _: intravisit::FnKind<'tcx>, _: &'tcx hir::FnDecl,
549 _: &'tcx hir::Block, _: Span, _: ast::NodeId) { }
552 /// Helper used by check_bare_fn and check_expr_fn. Does the grungy work of checking a function
553 /// body and returns the function context used for that purpose, since in the case of a fn item
554 /// there is still a bit more to do.
557 /// * inherited: other fields inherited from the enclosing fn (if any)
558 fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
559 unsafety: hir::Unsafety,
560 unsafety_id: ast::NodeId,
561 fn_sig: &ty::FnSig<'tcx>,
562 decl: &'tcx hir::FnDecl,
564 body: &'tcx hir::Block,
565 inherited: &'a Inherited<'a, 'tcx>)
569 let err_count_on_creation = tcx.sess.err_count();
571 let arg_tys = &fn_sig.inputs;
572 let ret_ty = fn_sig.output;
574 debug!("check_fn(arg_tys={:?}, ret_ty={:?}, fn_id={})",
579 // Create the function context. This is either derived from scratch or,
580 // in the case of function expressions, based on the outer context.
583 writeback_errors: Cell::new(false),
584 err_count_on_creation: err_count_on_creation,
586 ps: RefCell::new(UnsafetyState::function(unsafety, unsafety_id)),
591 if let ty::FnConverging(ret_ty) = ret_ty {
592 fcx.require_type_is_sized(ret_ty, decl.output.span(), traits::ReturnType);
595 debug!("fn-sig-map: fn_id={} fn_sig={:?}", fn_id, fn_sig);
597 inherited.tables.borrow_mut().liberated_fn_sigs.insert(fn_id, fn_sig.clone());
600 let mut visit = GatherLocalsVisitor { fcx: &fcx, };
602 // Add formal parameters.
603 for (arg_ty, input) in arg_tys.iter().zip(&decl.inputs) {
604 // The type of the argument must be well-formed.
606 // NB -- this is now checked in wfcheck, but that
607 // currently only results in warnings, so we issue an
608 // old-style WF obligation here so that we still get the
609 // errors that we used to get.
610 fcx.register_old_wf_obligation(arg_ty, input.ty.span, traits::MiscObligation);
612 // Create type variables for each argument.
613 pat_util::pat_bindings(
616 |_bm, pat_id, sp, _path| {
617 let var_ty = visit.assign(sp, pat_id, None);
618 fcx.require_type_is_sized(var_ty, sp,
619 traits::VariableType(pat_id));
622 // Check the pattern.
625 map: pat_id_map(&tcx.def_map, &*input.pat),
627 _match::check_pat(&pcx, &*input.pat, *arg_ty);
630 visit.visit_block(body);
633 check_block_with_expected(&fcx, body, match ret_ty {
634 ty::FnConverging(result_type) => ExpectHasType(result_type),
635 ty::FnDiverging => NoExpectation
638 for (input, arg) in decl.inputs.iter().zip(arg_tys) {
639 fcx.write_ty(input.id, arg);
645 pub fn check_struct(ccx: &CrateCtxt, id: ast::NodeId, span: Span) {
648 check_representable(tcx, span, id, "struct");
650 if tcx.lookup_simd(ccx.tcx.map.local_def_id(id)) {
651 check_simd(tcx, span, id);
655 pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
656 debug!("check_item_type(it.id={}, it.name={})",
658 ccx.tcx.item_path_str(ccx.tcx.map.local_def_id(it.id)));
659 let _indenter = indenter();
661 // Consts can play a role in type-checking, so they are included here.
662 hir::ItemStatic(_, _, ref e) |
663 hir::ItemConst(_, ref e) => check_const(ccx, it.span, &**e, it.id),
664 hir::ItemEnum(ref enum_definition, _) => {
665 check_enum_variants(ccx,
667 &enum_definition.variants,
670 hir::ItemFn(..) => {} // entirely within check_item_body
671 hir::ItemImpl(_, _, _, _, _, ref impl_items) => {
672 debug!("ItemImpl {} with id {}", it.name, it.id);
673 match ccx.tcx.impl_trait_ref(ccx.tcx.map.local_def_id(it.id)) {
674 Some(impl_trait_ref) => {
675 check_impl_items_against_trait(ccx,
683 hir::ItemTrait(_, ref generics, _, _) => {
684 check_trait_on_unimplemented(ccx, generics, it);
686 hir::ItemStruct(..) => {
687 check_struct(ccx, it.id, it.span);
689 hir::ItemTy(_, ref generics) => {
690 let pty_ty = ccx.tcx.node_id_to_type(it.id);
691 check_bounds_are_used(ccx, &generics.ty_params, pty_ty);
693 hir::ItemForeignMod(ref m) => {
694 if m.abi == abi::RustIntrinsic {
695 for item in &m.items {
696 intrinsic::check_intrinsic_type(ccx, item);
698 } else if m.abi == abi::PlatformIntrinsic {
699 for item in &m.items {
700 intrinsic::check_platform_intrinsic_type(ccx, item);
703 for item in &m.items {
704 let pty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(item.id));
705 if !pty.generics.types.is_empty() {
706 let mut err = struct_span_err!(ccx.tcx.sess, item.span, E0044,
707 "foreign items may not have type parameters");
708 span_help!(&mut err, item.span,
709 "consider using specialization instead of \
714 if let hir::ForeignItemFn(ref fn_decl, _) = item.node {
715 require_c_abi_if_variadic(ccx.tcx, fn_decl, m.abi, item.span);
720 _ => {/* nothing to do */ }
724 pub fn check_item_body<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
725 debug!("check_item_body(it.id={}, it.name={})",
727 ccx.tcx.item_path_str(ccx.tcx.map.local_def_id(it.id)));
728 let _indenter = indenter();
730 hir::ItemFn(ref decl, _, _, _, _, ref body) => {
731 let fn_pty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(it.id));
732 let param_env = ParameterEnvironment::for_item(ccx.tcx, it.id);
733 check_bare_fn(ccx, &**decl, &**body, it.id, it.span, fn_pty.ty, param_env);
735 hir::ItemImpl(_, _, _, _, _, ref impl_items) => {
736 debug!("ItemImpl {} with id {}", it.name, it.id);
738 let impl_pty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(it.id));
740 for impl_item in impl_items {
741 match impl_item.node {
742 hir::ImplItemKind::Const(_, ref expr) => {
743 check_const(ccx, impl_item.span, &*expr, impl_item.id)
745 hir::ImplItemKind::Method(ref sig, ref body) => {
746 check_method_body(ccx, &impl_pty.generics, sig, body,
747 impl_item.id, impl_item.span);
749 hir::ImplItemKind::Type(_) => {
750 // Nothing to do here.
755 hir::ItemTrait(_, _, _, ref trait_items) => {
756 let trait_def = ccx.tcx.lookup_trait_def(ccx.tcx.map.local_def_id(it.id));
757 for trait_item in trait_items {
758 match trait_item.node {
759 hir::ConstTraitItem(_, Some(ref expr)) => {
760 check_const(ccx, trait_item.span, &*expr, trait_item.id)
762 hir::MethodTraitItem(ref sig, Some(ref body)) => {
763 check_trait_fn_not_const(ccx, trait_item.span, sig.constness);
765 check_method_body(ccx, &trait_def.generics, sig, body,
766 trait_item.id, trait_item.span);
768 hir::MethodTraitItem(ref sig, None) => {
769 check_trait_fn_not_const(ccx, trait_item.span, sig.constness);
771 hir::ConstTraitItem(_, None) |
772 hir::TypeTraitItem(..) => {
778 _ => {/* nothing to do */ }
782 fn check_trait_fn_not_const<'a,'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
784 constness: hir::Constness)
787 hir::Constness::NotConst => {
790 hir::Constness::Const => {
791 span_err!(ccx.tcx.sess, span, E0379, "trait fns cannot be declared const");
796 fn check_trait_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
797 generics: &hir::Generics,
799 if let Some(ref attr) = item.attrs.iter().find(|a| {
800 a.check_name("rustc_on_unimplemented")
802 if let Some(ref istring) = attr.value_str() {
803 let parser = Parser::new(&istring);
804 let types = &*generics.ty_params;
805 for token in parser {
807 Piece::String(_) => (), // Normal string, no need to check it
808 Piece::NextArgument(a) => match a.position {
809 // `{Self}` is allowed
810 Position::ArgumentNamed(s) if s == "Self" => (),
811 // So is `{A}` if A is a type parameter
812 Position::ArgumentNamed(s) => match types.iter().find(|t| {
817 span_err!(ccx.tcx.sess, attr.span, E0230,
818 "there is no type parameter \
823 // `{:1}` and `{}` are not to be used
824 Position::ArgumentIs(_) | Position::ArgumentNext => {
825 span_err!(ccx.tcx.sess, attr.span, E0231,
826 "only named substitution \
827 parameters are allowed");
833 span_err!(ccx.tcx.sess, attr.span, E0232,
834 "this attribute must have a value, \
835 eg `#[rustc_on_unimplemented = \"foo\"]`")
840 /// Type checks a method body.
844 /// * `item_generics`: generics defined on the impl/trait that contains
846 /// * `self_bound`: bound for the `Self` type parameter, if any
847 /// * `method`: the method definition
848 fn check_method_body<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
849 item_generics: &ty::Generics<'tcx>,
850 sig: &'tcx hir::MethodSig,
851 body: &'tcx hir::Block,
852 id: ast::NodeId, span: Span) {
853 debug!("check_method_body(item_generics={:?}, id={})",
855 let param_env = ParameterEnvironment::for_item(ccx.tcx, id);
857 let fty = ccx.tcx.node_id_to_type(id);
858 debug!("check_method_body: fty={:?}", fty);
860 check_bare_fn(ccx, &sig.decl, body, id, span, fty, param_env);
863 fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
865 impl_trait_ref: &ty::TraitRef<'tcx>,
866 impl_items: &[hir::ImplItem]) {
867 // Locate trait methods
869 let trait_items = tcx.trait_items(impl_trait_ref.def_id);
870 let mut overridden_associated_type = None;
872 // Check existing impl methods to see if they are both present in trait
873 // and compatible with trait signature
874 for impl_item in impl_items {
875 let ty_impl_item = ccx.tcx.impl_or_trait_item(ccx.tcx.map.local_def_id(impl_item.id));
876 let ty_trait_item = trait_items.iter()
877 .find(|ac| ac.name() == ty_impl_item.name());
879 if let Some(ty_trait_item) = ty_trait_item {
880 match impl_item.node {
881 hir::ImplItemKind::Const(..) => {
882 let impl_const = match ty_impl_item {
883 ty::ConstTraitItem(ref cti) => cti,
884 _ => tcx.sess.span_bug(impl_item.span, "non-const impl-item for const")
887 // Find associated const definition.
888 if let &ty::ConstTraitItem(ref trait_const) = ty_trait_item {
889 compare_const_impl(ccx.tcx,
895 span_err!(tcx.sess, impl_item.span, E0323,
896 "item `{}` is an associated const, \
897 which doesn't match its trait `{:?}`",
902 hir::ImplItemKind::Method(ref sig, ref body) => {
903 check_trait_fn_not_const(ccx, impl_item.span, sig.constness);
905 let impl_method = match ty_impl_item {
906 ty::MethodTraitItem(ref mti) => mti,
907 _ => tcx.sess.span_bug(impl_item.span, "non-method impl-item for method")
910 if let &ty::MethodTraitItem(ref trait_method) = ty_trait_item {
911 compare_impl_method(ccx.tcx,
918 span_err!(tcx.sess, impl_item.span, E0324,
919 "item `{}` is an associated method, \
920 which doesn't match its trait `{:?}`",
925 hir::ImplItemKind::Type(_) => {
926 let impl_type = match ty_impl_item {
927 ty::TypeTraitItem(ref tti) => tti,
928 _ => tcx.sess.span_bug(impl_item.span, "non-type impl-item for type")
931 if let &ty::TypeTraitItem(ref at) = ty_trait_item {
932 if let Some(_) = at.ty {
933 overridden_associated_type = Some(impl_item);
936 span_err!(tcx.sess, impl_item.span, E0325,
937 "item `{}` is an associated type, \
938 which doesn't match its trait `{:?}`",
947 // Check for missing items from trait
948 let provided_methods = tcx.provided_trait_methods(impl_trait_ref.def_id);
949 let mut missing_items = Vec::new();
950 let mut invalidated_items = Vec::new();
951 let associated_type_overridden = overridden_associated_type.is_some();
952 for trait_item in trait_items.iter() {
954 ty::ConstTraitItem(ref associated_const) => {
955 let is_implemented = impl_items.iter().any(|ii| {
957 hir::ImplItemKind::Const(..) => {
958 ii.name == associated_const.name
963 let is_provided = associated_const.has_value;
967 missing_items.push(associated_const.name);
968 } else if associated_type_overridden {
969 invalidated_items.push(associated_const.name);
973 ty::MethodTraitItem(ref trait_method) => {
975 impl_items.iter().any(|ii| {
977 hir::ImplItemKind::Method(..) => {
978 ii.name == trait_method.name
984 provided_methods.iter().any(|m| m.name == trait_method.name);
987 missing_items.push(trait_method.name);
988 } else if associated_type_overridden {
989 invalidated_items.push(trait_method.name);
993 ty::TypeTraitItem(ref associated_type) => {
994 let is_implemented = impl_items.iter().any(|ii| {
996 hir::ImplItemKind::Type(_) => {
997 ii.name == associated_type.name
1002 let is_provided = associated_type.ty.is_some();
1003 if !is_implemented {
1005 missing_items.push(associated_type.name);
1006 } else if associated_type_overridden {
1007 invalidated_items.push(associated_type.name);
1014 if !missing_items.is_empty() {
1015 span_err!(tcx.sess, impl_span, E0046,
1016 "not all trait items implemented, missing: `{}`",
1017 missing_items.iter()
1018 .map(|name| name.to_string())
1019 .collect::<Vec<_>>().join("`, `"))
1022 if !invalidated_items.is_empty() {
1023 let invalidator = overridden_associated_type.unwrap();
1024 span_err!(tcx.sess, invalidator.span, E0399,
1025 "the following trait items need to be reimplemented \
1026 as `{}` was overridden: `{}`",
1028 invalidated_items.iter()
1029 .map(|name| name.to_string())
1030 .collect::<Vec<_>>().join("`, `"))
1034 fn report_cast_to_unsized_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
1041 if t_cast.references_error() || t_expr.references_error() {
1044 let tstr = fcx.infcx().ty_to_string(t_cast);
1045 let mut err = fcx.type_error_struct(span, |actual| {
1046 format!("cast to unsized type: `{}` as `{}`", actual, tstr)
1049 ty::TyRef(_, ty::TypeAndMut { mutbl: mt, .. }) => {
1050 let mtstr = match mt {
1051 hir::MutMutable => "mut ",
1052 hir::MutImmutable => ""
1054 if t_cast.is_trait() {
1055 match fcx.tcx().sess.codemap().span_to_snippet(t_span) {
1057 err.span_suggestion(t_span,
1058 "try casting to a reference instead:",
1059 format!("&{}{}", mtstr, s));
1062 span_help!(err, t_span,
1063 "did you mean `&{}{}`?", mtstr, tstr),
1066 span_help!(err, span,
1067 "consider using an implicit coercion to `&{}{}` instead",
1072 match fcx.tcx().sess.codemap().span_to_snippet(t_span) {
1074 err.span_suggestion(t_span,
1075 "try casting to a `Box` instead:",
1076 format!("Box<{}>", s));
1079 span_help!(err, t_span, "did you mean `Box<{}>`?", tstr),
1083 span_help!(err, e_span,
1084 "consider using a box or reference as appropriate");
1088 fcx.write_error(id);
1092 impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> {
// `AstConv` needs access to the type context; forward to the crate context.
1093 fn tcx(&self) -> &ty::ctxt<'tcx> { self.ccx.tcx }
1095 fn get_item_type_scheme(&self, _: Span, id: DefId)
1096 -> Result<ty::TypeScheme<'tcx>, ErrorReported>
1098 Ok(self.tcx().lookup_item_type(id))
1101 fn get_trait_def(&self, _: Span, id: DefId)
1102 -> Result<&'tcx ty::TraitDef<'tcx>, ErrorReported>
1104 Ok(self.tcx().lookup_trait_def(id))
1107 fn ensure_super_predicates(&self, _: Span, _: DefId) -> Result<(), ErrorReported> {
1108 // all super predicates are ensured during collect pass
1112 fn get_free_substs(&self) -> Option<&Substs<'tcx>> {
1113 Some(&self.inh.infcx.parameter_environment.free_substs)
1116 fn get_type_parameter_bounds(&self,
1118 node_id: ast::NodeId)
1119 -> Result<Vec<ty::PolyTraitRef<'tcx>>, ErrorReported>
1121 let def = self.tcx().type_parameter_def(node_id);
1122 let r = self.inh.infcx.parameter_environment
1125 .filter_map(|predicate| {
1127 ty::Predicate::Trait(ref data) => {
1128 if data.0.self_ty().is_param(def.space, def.index) {
1129 Some(data.to_poly_trait_ref())
1143 fn trait_defines_associated_type_named(&self,
1144 trait_def_id: DefId,
1145 assoc_name: ast::Name)
1148 let trait_def = self.ccx.tcx.lookup_trait_def(trait_def_id);
1149 trait_def.associated_type_names.contains(&assoc_name)
1153 ty_param_def: Option<ty::TypeParameterDef<'tcx>>,
1154 substs: Option<&mut subst::Substs<'tcx>>,
1155 space: Option<subst::ParamSpace>,
1156 span: Span) -> Ty<'tcx> {
1157 // Grab the default doing subsitution
1158 let default = ty_param_def.and_then(|def| {
1159 def.default.map(|ty| type_variable::Default {
1160 ty: ty.subst_spanned(self.tcx(), substs.as_ref().unwrap(), Some(span)),
1162 def_id: def.default_def_id
1166 let ty_var = self.infcx().next_ty_var_with_default(default);
1168 // Finally we add the type variable to the substs
1171 Some(substs) => { substs.types.push(space.unwrap(), ty_var); ty_var }
1175 fn projected_ty_from_poly_trait_ref(&self,
1177 poly_trait_ref: ty::PolyTraitRef<'tcx>,
1178 item_name: ast::Name)
1181 let (trait_ref, _) =
1182 self.infcx().replace_late_bound_regions_with_fresh_var(
1184 infer::LateBoundRegionConversionTime::AssocTypeProjection(item_name),
1187 self.normalize_associated_type(span, trait_ref, item_name)
1190 fn projected_ty(&self,
1192 trait_ref: ty::TraitRef<'tcx>,
1193 item_name: ast::Name)
1196 self.normalize_associated_type(span, trait_ref, item_name)
// NOTE(review): listing is non-contiguous (embedded line numbers jump);
// elided statements/braces exist between lines. Inherent `FnCtxt` methods:
// simple accessors plus deferred-call bookkeeping.
1200 impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
1201 fn tcx(&self) -> &ty::ctxt<'tcx> { self.ccx.tcx }
1203 pub fn infcx(&self) -> &infer::InferCtxt<'a,'tcx> {
1207 pub fn param_env(&self) -> &ty::ParameterEnvironment<'a,'tcx> {
1208 &self.inh.infcx.parameter_environment
1211 pub fn sess(&self) -> &Session {
// Errors reported since this FnCtxt was created; used to decide whether a
// missing node type is a downstream effect of an earlier error.
1215 pub fn err_count_since_creation(&self) -> usize {
1216 self.ccx.tcx.sess.err_count() - self.err_count_on_creation
1219 /// Resolves type variables in `ty` if possible. Unlike the infcx
1220 /// version, this version will also select obligations if it seems
1221 /// useful, in an effort to get more type information.
1222 fn resolve_type_vars_if_possible(&self, mut ty: Ty<'tcx>) -> Ty<'tcx> {
1223 debug!("resolve_type_vars_if_possible(ty={:?})", ty);
1225 // No TyInfer()? Nothing needs doing.
1226 if !ty.has_infer_types() {
1227 debug!("resolve_type_vars_if_possible: ty={:?}", ty);
1231 // If `ty` is a type variable, see whether we already know what it is.
1232 ty = self.infcx().resolve_type_vars_if_possible(&ty);
1233 if !ty.has_infer_types() {
1234 debug!("resolve_type_vars_if_possible: ty={:?}", ty);
1238 // If not, try resolving pending obligations as much as
1239 // possible. This can help substantially when there are
1240 // indirect dependencies that don't seem worth tracking
1242 self.select_obligations_where_possible();
1243 ty = self.infcx().resolve_type_vars_if_possible(&ty);
1245 debug!("resolve_type_vars_if_possible: ty={:?}", ty);
// Queues a closure-call resolution to be retried once the closure's kind
// is known (handlers are drained elsewhere, after upvar inference).
1249 fn record_deferred_call_resolution(&self,
1250 closure_def_id: DefId,
1251 r: DeferredCallResolutionHandler<'tcx>) {
1252 let mut deferred_call_resolutions = self.inh.deferred_call_resolutions.borrow_mut();
1253 deferred_call_resolutions.entry(closure_def_id).or_insert(vec![]).push(r);
1256 fn remove_deferred_call_resolutions(&self,
1257 closure_def_id: DefId)
1258 -> Vec<DeferredCallResolutionHandler<'tcx>>
1260 let mut deferred_call_resolutions = self.inh.deferred_call_resolutions.borrow_mut();
1261 deferred_call_resolutions.remove(&closure_def_id).unwrap_or(Vec::new())
// Debug tag identifying this FnCtxt by its address.
1264 pub fn tag(&self) -> String {
1265 let self_ptr: *const FnCtxt = self;
1266 format!("{:?}", self_ptr)
1270 match self.inh.locals.borrow().get(&nid) {
1273 span_err!(self.tcx().sess, span, E0513,
1274 "no type for local variable {}",
1276 self.tcx().types.err
1282 pub fn write_ty(&self, node_id: ast::NodeId, ty: Ty<'tcx>) {
1283 debug!("write_ty({}, {:?}) in fcx {}",
1284 node_id, ty, self.tag());
1285 self.inh.tables.borrow_mut().node_types.insert(node_id, ty);
1288 pub fn write_substs(&self, node_id: ast::NodeId, substs: ty::ItemSubsts<'tcx>) {
1289 if !substs.substs.is_noop() {
1290 debug!("write_substs({}, {:?}) in fcx {}",
1295 self.inh.tables.borrow_mut().item_substs.insert(node_id, substs);
1299 pub fn write_autoderef_adjustment(&self,
1300 node_id: ast::NodeId,
1302 self.write_adjustment(
1304 adjustment::AdjustDerefRef(adjustment::AutoDerefRef {
1312 pub fn write_adjustment(&self,
1313 node_id: ast::NodeId,
1314 adj: adjustment::AutoAdjustment<'tcx>) {
1315 debug!("write_adjustment(node_id={}, adj={:?})", node_id, adj);
1317 if adj.is_identity() {
1321 self.inh.tables.borrow_mut().adjustments.insert(node_id, adj);
// NOTE(review): non-contiguous listing; lines elided between entries.
1324 /// Basically whenever we are converting from a type scheme into
1325 /// the fn body space, we always want to normalize associated
1326 /// types as well. This function combines the two.
1327 fn instantiate_type_scheme<T>(&self,
1329 substs: &Substs<'tcx>,
1332 where T : TypeFoldable<'tcx>
// Substitute first, then normalize any associated-type projections that
// the substitution exposed.
1334 let value = value.subst(self.tcx(), substs);
1335 let result = self.normalize_associated_types_in(span, &value);
1336 debug!("instantiate_type_scheme(value={:?}, substs={:?}) = {:?}",
1343 /// As `instantiate_type_scheme`, but for the bounds found in a
1344 /// generic type scheme.
1345 fn instantiate_bounds(&self,
1347 substs: &Substs<'tcx>,
1348 bounds: &ty::GenericPredicates<'tcx>)
1349 -> ty::InstantiatedPredicates<'tcx>
1351 ty::InstantiatedPredicates {
1352 predicates: self.instantiate_type_scheme(span, substs, &bounds.predicates)
// Delegates to the inherited tables, scoped to this function body.
1357 fn normalize_associated_types_in<T>(&self, span: Span, value: &T) -> T
1358 where T : TypeFoldable<'tcx>
1360 self.inh.normalize_associated_types_in(span, self.body_id, value)
// Normalizes a single projection `<trait_ref>::item_name`, registering any
// obligations produced by the normalization machinery.
1363 fn normalize_associated_type(&self,
1365 trait_ref: ty::TraitRef<'tcx>,
1366 item_name: ast::Name)
1369 let cause = traits::ObligationCause::new(span,
1371 traits::ObligationCauseCode::MiscObligation);
1376 .normalize_projection_type(self.infcx(),
1378 trait_ref: trait_ref,
1379 item_name: item_name,
1384 /// Instantiates the type in `did` with the generics in `path` and returns
1385 /// it (registering the necessary trait obligations along the way).
1387 /// Note that this function is only intended to be used with type-paths,
1388 /// not with value-paths.
1389 pub fn instantiate_type(&self,
1394 debug!("instantiate_type(did={:?}, path={:?})", did, path);
1396 self.tcx().lookup_item_type(did);
1397 let type_predicates =
1398 self.tcx().lookup_predicates(did);
// Convert the path's explicit type arguments into substitutions (missing
// ones become inference variables via PathParamMode::Optional).
1399 let substs = astconv::ast_path_substs_for_ty(self, self,
1401 PathParamMode::Optional,
1402 &type_scheme.generics,
1403 path.segments.last().unwrap());
1404 debug!("instantiate_type: ty={:?} substs={:?}", &type_scheme.ty, &substs);
1406 self.instantiate_bounds(path.span, &substs, &type_predicates);
1407 self.add_obligations_for_parameters(
1408 traits::ObligationCause::new(
1411 traits::ItemObligation(did)),
1414 self.instantiate_type_scheme(path.span, &substs, &type_scheme.ty)
// NOTE(review): non-contiguous listing; lines elided between entries.
1417 /// Return the dict-like variant corresponding to a given `Def`.
1418 pub fn def_struct_variant(&self,
1421 -> Option<(ty::AdtDef<'tcx>, ty::VariantDef<'tcx>)>
1423 let (adt, variant) = match def {
1424 def::DefVariant(enum_id, variant_id, _) => {
1425 let adt = self.tcx().lookup_adt_def(enum_id);
1426 (adt, adt.variant_with_id(variant_id))
1428 def::DefTy(did, _) | def::DefStruct(did) => {
1429 let typ = self.tcx().lookup_item_type(did);
1430 if let ty::TyStruct(adt, _) = typ.ty.sty {
1431 (adt, adt.struct_variant())
1439 let var_kind = variant.kind();
1440 if var_kind == ty::VariantKind::Struct {
1441 Some((adt, variant))
// Unit variants written with braces (`S {}`) are feature-gated here.
1442 } else if var_kind == ty::VariantKind::Unit {
1443 if !self.tcx().sess.features.borrow().braced_empty_structs {
1444 let mut err = self.tcx().sess.struct_span_err(span,
1445 "empty structs and enum variants \
1446 with braces are unstable");
1447 fileline_help!(&mut err, span, "add #![feature(braced_empty_structs)] to \
1448 the crate features to enable");
1452 Some((adt, variant))
// Shorthands for recording the unit type / the error type on a node.
1458 pub fn write_nil(&self, node_id: ast::NodeId) {
1459 self.write_ty(node_id, self.tcx().mk_nil());
1461 pub fn write_error(&self, node_id: ast::NodeId) {
1462 self.write_ty(node_id, self.tcx().types.err);
// NOTE(review): non-contiguous listing; lines elided between entries.
// Registers `ty: bound` as an obligation to be proven later.
1465 pub fn require_type_meets(&self,
1468 code: traits::ObligationCauseCode<'tcx>,
1469 bound: ty::BuiltinBound)
1471 self.register_builtin_bound(
1474 traits::ObligationCause::new(span, self.body_id, code));
1477 pub fn require_type_is_sized(&self,
1480 code: traits::ObligationCauseCode<'tcx>)
1482 self.require_type_meets(ty, span, code, ty::BoundSized);
1485 pub fn require_expr_have_sized_type(&self,
1487 code: traits::ObligationCauseCode<'tcx>)
1489 self.require_type_is_sized(self.expr_ty(expr), expr.span, code);
// Pure query (no obligation registered): can `ty` be shown Sized now?
1492 pub fn type_is_known_to_be_sized(&self,
1497 traits::type_known_to_meet_builtin_bound(self.infcx(),
1503 pub fn register_builtin_bound(&self,
1505 builtin_bound: ty::BuiltinBound,
1506 cause: traits::ObligationCause<'tcx>)
1508 self.inh.infcx.fulfillment_cx.borrow_mut()
1509 .register_builtin_bound(self.infcx(), ty, builtin_bound, cause);
1512 pub fn register_predicate(&self,
1513 obligation: traits::PredicateObligation<'tcx>)
1515 debug!("register_predicate({:?})",
1517 self.inh.infcx.fulfillment_cx
1519 .register_predicate_obligation(self.infcx(), obligation);
// Converts an AST type and registers a well-formedness obligation for it.
1522 pub fn to_ty(&self, ast_t: &hir::Ty) -> Ty<'tcx> {
1523 let t = ast_ty_to_ty(self, self, ast_t);
1524 self.register_wf_obligation(t, ast_t.span, traits::MiscObligation);
// Missing entries are a compiler bug here — expressions are typed by main.
1528 pub fn expr_ty(&self, ex: &hir::Expr) -> Ty<'tcx> {
1529 match self.inh.tables.borrow().node_types.get(&ex.id) {
1532 self.tcx().sess.bug(&format!("no type for expr in fcx {}",
1538 /// Apply `adjustment` to the type of `expr`
1539 pub fn adjust_expr_ty(&self,
1541 adjustment: Option<&adjustment::AutoAdjustment<'tcx>>)
1544 let raw_ty = self.expr_ty(expr);
1545 let raw_ty = self.infcx().shallow_resolve(raw_ty);
1546 let resolve_ty = |ty: Ty<'tcx>| self.infcx().resolve_type_vars_if_possible(&ty);
// Overloaded-deref steps consult the method map for the callee's type.
1547 raw_ty.adjust(self.tcx(), expr.span, expr.id, adjustment, |method_call| {
1548 self.inh.tables.borrow().method_map.get(&method_call)
1549 .map(|method| resolve_ty(method.ty))
// NOTE(review): non-contiguous listing; lines elided between entries.
// Like `expr_ty` but for any node; tolerates a missing entry when errors
// were already reported (returns the error type instead of ICEing).
1553 pub fn node_ty(&self, id: ast::NodeId) -> Ty<'tcx> {
1554 match self.inh.tables.borrow().node_types.get(&id) {
1556 None if self.err_count_since_creation() != 0 => self.tcx().types.err,
1558 self.tcx().sess.bug(
1559 &format!("no type for node {}: {} in fcx {}",
1560 id, self.tcx().map.node_to_string(id),
1566 pub fn item_substs(&self) -> Ref<NodeMap<ty::ItemSubsts<'tcx>>> {
1567 // NOTE: @jroesch this is hack that appears to be fixed on nightly, will monitor if
1568 // it changes when we upgrade the snapshot compiler
// (Typo in the original fn name "susbts" is preserved — internal helper.)
1569 fn project_item_susbts<'a, 'tcx>(tables: &'a ty::Tables<'tcx>)
1570 -> &'a NodeMap<ty::ItemSubsts<'tcx>> {
1574 Ref::map(self.inh.tables.borrow(), project_item_susbts)
// Invokes `f` with the node's ItemSubsts, if any were recorded.
1577 pub fn opt_node_ty_substs<F>(&self,
1580 F: FnOnce(&ty::ItemSubsts<'tcx>),
1582 match self.inh.tables.borrow().item_substs.get(&id) {
// Thin wrappers over the inference context's subtype/equate/subregion ops.
1588 pub fn mk_subty(&self,
1589 a_is_expected: bool,
1593 -> Result<(), TypeError<'tcx>> {
1594 infer::mk_subty(self.infcx(), a_is_expected, origin, sub, sup)
1597 pub fn mk_eqty(&self,
1598 a_is_expected: bool,
1602 -> Result<(), TypeError<'tcx>> {
1603 infer::mk_eqty(self.infcx(), a_is_expected, origin, sub, sup)
1606 pub fn mk_subr(&self,
1607 origin: infer::SubregionOrigin<'tcx>,
1610 infer::mk_subr(self.infcx(), origin, sub, sup)
// NOTE(review): non-contiguous listing; lines elided between entries.
// Error-reporting wrappers delegating to the inference context.
1613 pub fn type_error_message<M>(&self,
1616 actual_ty: Ty<'tcx>,
1617 err: Option<&TypeError<'tcx>>)
1618 where M: FnOnce(String) -> String,
1620 self.infcx().type_error_message(sp, mk_msg, actual_ty, err);
1623 pub fn type_error_struct<M>(&self,
1626 actual_ty: Ty<'tcx>,
1627 err: Option<&TypeError<'tcx>>)
1628 -> DiagnosticBuilder<'tcx>
1629 where M: FnOnce(String) -> String,
1631 self.infcx().type_error_struct(sp, mk_msg, actual_ty, err)
1634 pub fn report_mismatched_types(&self,
1638 err: &TypeError<'tcx>) {
1639 self.infcx().report_mismatched_types(sp, e, a, err)
1642 /// Registers an obligation for checking later, during regionck, that the type `ty` must
1643 /// outlive the region `r`.
1644 pub fn register_region_obligation(&self,
1647 cause: traits::ObligationCause<'tcx>)
1649 let mut fulfillment_cx = self.inh.infcx.fulfillment_cx.borrow_mut();
1650 fulfillment_cx.register_region_obligation(ty, region, cause);
1653 /// Registers an obligation for checking later, during regionck, that the type `ty` must
1654 /// outlive the region `r`.
1655 pub fn register_wf_obligation(&self,
1658 code: traits::ObligationCauseCode<'tcx>)
1660 // WF obligations never themselves fail, so no real need to give a detailed cause:
1661 let cause = traits::ObligationCause::new(span, self.body_id, code);
1662 self.register_predicate(traits::Obligation::new(cause, ty::Predicate::WellFormed(ty)));
1665 pub fn register_old_wf_obligation(&self,
1668 code: traits::ObligationCauseCode<'tcx>)
1670 // Registers an "old-style" WF obligation that uses the
1671 // implicator code. This is basically a buggy version of
1672 // `register_wf_obligation` that is being kept around
1673 // temporarily just to help with phasing in the newer rules.
1675 // FIXME(#27579) all uses of this should be migrated to register_wf_obligation eventually
1676 let cause = traits::ObligationCause::new(span, self.body_id, code);
// ty::ReEmpty acts as a trivially-satisfied region so only the implicator
// side effects matter here — see FIXME above.
1677 self.register_region_obligation(ty, ty::ReEmpty, cause);
// NOTE(review): non-contiguous listing; lines elided between entries.
1680 /// Registers obligations that all types appearing in `substs` are well-formed.
1681 pub fn add_wf_bounds(&self, substs: &Substs<'tcx>, expr: &hir::Expr)
1683 for &ty in &substs.types {
1684 self.register_wf_obligation(ty, expr.span, traits::MiscObligation);
1688 /// Given a fully substituted set of bounds (`generic_bounds`), and the values with which each
1689 /// type/region parameter was instantiated (`substs`), creates and registers suitable
1690 /// trait/region obligations.
1692 /// For example, if there is a function:
1695 /// fn foo<'a,T:'a>(...)
1698 /// and a reference:
1704 /// Then we will create a fresh region variable `'$0` and a fresh type variable `$1` for `'a`
1705 /// and `T`. This routine will add a region obligation `$1:'$0` and register it locally.
1706 pub fn add_obligations_for_parameters(&self,
1707 cause: traits::ObligationCause<'tcx>,
1708 predicates: &ty::InstantiatedPredicates<'tcx>)
// Escaping late-bound regions would be a caller bug at this point.
1710 assert!(!predicates.has_escaping_regions());
1712 debug!("add_obligations_for_parameters(predicates={:?})",
1715 for obligation in traits::predicates_for_generics(cause, predicates) {
1716 self.register_predicate(obligation);
1720 // FIXME(arielb1): use this instead of field.ty everywhere
// Substituted-and-normalized type of a struct/enum field.
1721 pub fn field_ty(&self,
1723 field: ty::FieldDef<'tcx>,
1724 substs: &Substs<'tcx>)
1727 self.normalize_associated_types_in(span,
1728 &field.ty(self.tcx(), substs))
// NOTE(review): non-contiguous listing; lines elided between entries.
// (The two comment lines below appear stranded here in this fragment —
// they likely belong to an elided item; verify against the full file.)
1731 // Only for fields! Returns <none> for methods>
1732 // Indifferent to privacy flags
// Runs all deferred cast checks accumulated during the main pass.
1733 fn check_casts(&self) {
1734 let mut deferred_cast_checks = self.inh.deferred_cast_checks.borrow_mut();
1735 for cast in deferred_cast_checks.drain(..) {
1740 /// Apply "fallbacks" to some types
1741 /// ! gets replaced with (), unconstrained ints with i32, and unconstrained floats with f64.
1742 fn default_type_parameters(&self) {
1743 use middle::ty::error::UnconstrainedNumeric::Neither;
1744 use middle::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1745 for ty in &self.infcx().unsolved_variables() {
1746 let resolved = self.infcx().resolve_type_vars_if_possible(ty);
// Diverging type variables (from `!`-typed expressions) default to ().
1747 if self.infcx().type_var_diverges(resolved) {
1748 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
1750 match self.infcx().type_is_unconstrained_numeric(resolved) {
1751 UnconstrainedInt => {
1752 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32)
1754 UnconstrainedFloat => {
1755 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64)
// Dispatch between the legacy fallback and the feature-gated default
// type-parameter fallback (RFC 213-era).
1763 fn select_all_obligations_and_apply_defaults(&self) {
1764 if self.tcx().sess.features.borrow().default_type_parameter_fallback {
1765 self.new_select_all_obligations_and_apply_defaults();
1767 self.old_select_all_obligations_and_apply_defaults();
1771 // Implements old type inference fallback algorithm
1772 fn old_select_all_obligations_and_apply_defaults(&self) {
// Select, apply numeric/diverging defaults, then select again so that
// progress enabled by defaulting is picked up.
1773 self.select_obligations_where_possible();
1774 self.default_type_parameters();
1775 self.select_obligations_where_possible();
// NOTE(review): non-contiguous listing; lines elided between entries.
// Feature-gated fallback algorithm: iteratively selects obligations and
// applies user-specified type-parameter defaults (plus the numeric and
// diverging defaults), detecting and reporting conflicting defaults.
1778 fn new_select_all_obligations_and_apply_defaults(&self) {
1779 use middle::ty::error::UnconstrainedNumeric::Neither;
1780 use middle::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1782 // For the time being this errs on the side of being memory wasteful but provides better
1784 // let type_variables = self.infcx().type_variables.clone();
1786 // There is a possibility that this algorithm will have to run an arbitrary number of times
1787 // to terminate so we bound it by the compiler's recursion limit.
1788 for _ in 0..self.tcx().sess.recursion_limit.get() {
1789 // First we try to solve all obligations, it is possible that the last iteration
1790 // has made it possible to make more progress.
1791 self.select_obligations_where_possible();
1793 let mut conflicts = Vec::new();
1795 // Collect all unsolved type, integral and floating point variables.
1796 let unsolved_variables = self.inh.infcx.unsolved_variables();
1798 // We must collect the defaults *before* we do any unification. Because we have
1799 // directly attached defaults to the type variables any unification that occurs
1800 // will erase defaults causing conflicting defaults to be completely ignored.
1801 let default_map: FnvHashMap<_, _> =
1804 .filter_map(|t| self.infcx().default(t).map(|d| (t, d)))
1807 let mut unbound_tyvars = HashSet::new();
1809 debug!("select_all_obligations_and_apply_defaults: defaults={:?}", default_map);
1811 // We loop over the unsolved variables, resolving them and if they are
1812 // an unconstrained numeric type we add them to the set of unbound
1813 // variables. We do this so we only apply literal fallback to type
1814 // variables without defaults.
1815 for ty in &unsolved_variables {
1816 let resolved = self.infcx().resolve_type_vars_if_possible(ty);
1817 if self.infcx().type_var_diverges(resolved) {
1818 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
1820 match self.infcx().type_is_unconstrained_numeric(resolved) {
1821 UnconstrainedInt | UnconstrainedFloat => {
1822 unbound_tyvars.insert(resolved);
1829 // We now remove any numeric types that also have defaults, and instead insert
1830 // the type variable with a defined fallback.
1831 for ty in &unsolved_variables {
1832 if let Some(_default) = default_map.get(ty) {
1833 let resolved = self.infcx().resolve_type_vars_if_possible(ty);
1835 debug!("select_all_obligations_and_apply_defaults: ty: {:?} with default: {:?}",
1838 match resolved.sty {
1839 ty::TyInfer(ty::TyVar(_)) => {
1840 unbound_tyvars.insert(ty);
1843 ty::TyInfer(ty::IntVar(_)) | ty::TyInfer(ty::FloatVar(_)) => {
1844 unbound_tyvars.insert(ty);
// Prefer the user-specified default over the numeric literal fallback.
1845 if unbound_tyvars.contains(resolved) {
1846 unbound_tyvars.remove(resolved);
1855 // If there are no more fallbacks to apply at this point we have applied all possible
1856 // defaults and type inference will proceed as normal.
1857 if unbound_tyvars.is_empty() {
1861 // Finally we go through each of the unbound type variables and unify them with
1862 // the proper fallback, reporting a conflicting default error if any of the
1863 // unifications fail. We know it must be a conflicting default because the
1864 // variable would only be in `unbound_tyvars` and have a concrete value if
1865 // it had been solved by previously applying a default.
1867 // We wrap this in a transaction for error reporting, if we detect a conflict
1868 // we will rollback the inference context to its prior state so we can probe
1869 // for conflicts and correctly report them.
1872 let _ = self.infcx().commit_if_ok(|_: &infer::CombinedSnapshot| {
1873 for ty in &unbound_tyvars {
1874 if self.infcx().type_var_diverges(ty) {
1875 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
1877 match self.infcx().type_is_unconstrained_numeric(ty) {
1878 UnconstrainedInt => {
1879 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32)
1881 UnconstrainedFloat => {
1882 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64)
1885 if let Some(default) = default_map.get(ty) {
1886 let default = default.clone();
1887 match infer::mk_eqty(self.infcx(), false,
1888 TypeOrigin::Misc(default.origin_span),
// Unification failure here means two defaults disagree; remember the
// variable/default pair so we can report it after rollback.
1892 conflicts.push((*ty, default));
1901 // If there are conflicts we rollback, otherwise commit
1902 if conflicts.len() > 0 {
1909 if conflicts.len() > 0 {
1910 // Loop through each conflicting default, figuring out the default that caused
1911 // a unification failure and then report an error for each.
1912 for (conflict, default) in conflicts {
1913 let conflicting_default =
1914 self.find_conflicting_default(&unbound_tyvars, &default_map, conflict)
1915 .unwrap_or(type_variable::Default {
1916 ty: self.infcx().next_ty_var(),
1917 origin_span: codemap::DUMMY_SP,
1918 def_id: self.tcx().map.local_def_id(0) // what do I put here?
1921 // This is to ensure that we eliminate any non-determinism from the error
1922 // reporting by fixing an order, it doesn't matter what order we choose
1923 // just that it is consistent.
1924 let (first_default, second_default) =
1925 if default.def_id < conflicting_default.def_id {
1926 (default, conflicting_default)
1928 (conflicting_default, default)
1932 self.infcx().report_conflicting_default_types(
1933 first_default.origin_span,
1940 self.select_obligations_where_possible();
// NOTE(review): non-contiguous listing; lines elided between entries.
1943 // For use in error handling related to default type parameter fallback. We explicitly
1944 // apply the default that caused conflict first to a local version of the type variable
1945 // table then apply defaults until we find a conflict. That default must be the one
1946 // that caused conflict earlier.
1947 fn find_conflicting_default(&self,
1948 unbound_vars: &HashSet<Ty<'tcx>>,
1949 default_map: &FnvHashMap<&Ty<'tcx>, type_variable::Default<'tcx>>,
1951 -> Option<type_variable::Default<'tcx>> {
1952 use middle::ty::error::UnconstrainedNumeric::Neither;
1953 use middle::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1955 // Ensure that we apply the conflicting default first
1956 let mut unbound_tyvars = Vec::with_capacity(unbound_vars.len() + 1);
1957 unbound_tyvars.push(conflict);
1958 unbound_tyvars.extend(unbound_vars.iter());
1960 let mut result = None;
1961 // We run the same code as above applying defaults in order, this time when
1962 // we find the conflict we just return it for error reporting above.
1964 // We also run this inside snapshot that never commits so we can do error
1965 // reporting for more than one conflict.
1966 for ty in &unbound_tyvars {
1967 if self.infcx().type_var_diverges(ty) {
1968 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
1970 match self.infcx().type_is_unconstrained_numeric(ty) {
1971 UnconstrainedInt => {
1972 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32)
1974 UnconstrainedFloat => {
1975 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64)
1978 if let Some(default) = default_map.get(ty) {
1979 let default = default.clone();
1980 match infer::mk_eqty(self.infcx(), false,
1981 TypeOrigin::Misc(default.origin_span),
// First default whose unification fails is the one that conflicted.
1985 result = Some(default);
// NOTE(review): non-contiguous listing; lines elided between entries.
// End-of-body selection: apply defaults, then require every remaining
// obligation to be provable, reporting fulfillment errors otherwise.
1997 fn select_all_obligations_or_error(&self) {
1998 debug!("select_all_obligations_or_error");
2000 // upvar inference should have ensured that all deferred call
2001 // resolutions are handled by now.
2002 assert!(self.inh.deferred_call_resolutions.borrow().is_empty());
2004 self.select_all_obligations_and_apply_defaults();
2006 let mut fulfillment_cx = self.inh.infcx.fulfillment_cx.borrow_mut();
2007 match fulfillment_cx.select_all_or_error(self.infcx()) {
2009 Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }
2013 /// Select as many obligations as we can at present.
2014 fn select_obligations_where_possible(&self) {
2016 self.inh.infcx.fulfillment_cx
2018 .select_where_possible(self.infcx())
2021 Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }
// NOTE(review): non-contiguous listing; lines elided between entries.
// Inside a fn body, every elided region becomes a fresh inference
// variable rather than a fixed default.
2026 impl<'a, 'tcx> RegionScope for FnCtxt<'a, 'tcx> {
2027 fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
2028 Some(self.base_object_lifetime_default(span))
2031 fn base_object_lifetime_default(&self, span: Span) -> ty::Region {
2032 // RFC #599 specifies that object lifetime defaults take
2033 // precedence over other defaults. But within a fn body we
2034 // don't have a *default* region, rather we use inference to
2035 // find the *correct* region, which is strictly more general
2036 // (and anyway, within a fn body the right region may not even
2037 // be something the user can write explicitly, since it might
2038 // be some expression).
2039 self.infcx().next_region_var(infer::MiscVariable(span))
2042 fn anon_regions(&self, span: Span, count: usize)
2043 -> Result<Vec<ty::Region>, Option<Vec<ElisionFailureInfo>>> {
2044 Ok((0..count).map(|_| {
2045 self.infcx().next_region_var(infer::MiscVariable(span))
// NOTE(review): the variant identifier lines are elided from this
// fragment — from the doc comments and the `autoderef` match below they
// are presumably `Error` and `Ignore`; confirm against the full file.
2050 /// Whether `autoderef` requires types to resolve.
2051 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
2052 pub enum UnresolvedTypeAction {
2053 /// Produce an error and return `TyError` whenever a type cannot
2054 /// be resolved (i.e. it is `TyInfer`).
2056 /// Go on without emitting any errors, and return the unresolved
2057 /// type. Useful for probing, e.g. in coercions.
// NOTE(review): non-contiguous listing; lines elided between entries.
2061 /// Executes an autoderef loop for the type `t`. At each step, invokes `should_stop` to decide
2062 /// whether to terminate the loop. Returns the final type and number of derefs that it performed.
2064 /// Note: this method does not modify the adjustments table. The caller is responsible for
2065 /// inserting an AutoAdjustment record into the `fcx` using one of the suitable methods.
2066 pub fn autoderef<'a, 'tcx, T, F>(fcx: &FnCtxt<'a, 'tcx>,
2069 opt_expr: Option<&hir::Expr>,
2070 unresolved_type_action: UnresolvedTypeAction,
2071 mut lvalue_pref: LvaluePreference,
2073 -> (Ty<'tcx>, usize, Option<T>)
2074 where F: FnMut(Ty<'tcx>, usize) -> Option<T>,
2076 debug!("autoderef(base_ty={:?}, opt_expr={:?}, lvalue_pref={:?})",
2081 let mut t = base_ty;
// Bounded by the recursion limit so pathological Deref impls terminate.
2082 for autoderefs in 0..fcx.tcx().sess.recursion_limit.get() {
2083 let resolved_t = match unresolved_type_action {
2084 UnresolvedTypeAction::Error => {
2085 structurally_resolved_type(fcx, sp, t)
2087 UnresolvedTypeAction::Ignore => {
2088 // We can continue even when the type cannot be resolved
2089 // (i.e. it is an inference variable) because `Ty::builtin_deref`
2090 // and `try_overloaded_deref` both simply return `None`
2091 // in such a case without producing spurious errors.
2092 fcx.resolve_type_vars_if_possible(t)
2095 if resolved_t.references_error() {
2096 return (resolved_t, autoderefs, None);
2099 match should_stop(resolved_t, autoderefs) {
2100 Some(x) => return (resolved_t, autoderefs, Some(x)),
2104 // Otherwise, deref if type is derefable:
2105 let mt = match resolved_t.builtin_deref(false, lvalue_pref) {
2106 Some(mt) => Some(mt),
2109 opt_expr.map(|expr| MethodCall::autoderef(expr.id, autoderefs as u32));
2111 // Super subtle: it might seem as though we should
2112 // pass `opt_expr` to `try_overloaded_deref`, so that
2113 // the (implicit) autoref of using an overloaded deref
2114 // would get added to the adjustment table. However we
2115 // do not do that, because it's kind of a
2116 // "meta-adjustment" -- instead, we just leave it
2117 // unrecorded and know that there "will be" an
2118 // autoref. regionck and other bits of the code base,
2119 // when they encounter an overloaded autoderef, have
2120 // to do some reconstructive surgery. This is a pretty
2121 // complex mess that is begging for a proper MIR.
2122 try_overloaded_deref(fcx, sp, method_call, None, resolved_t, lvalue_pref)
// Once we deref through a shared reference, a mutable lvalue is no
// longer reachable, so drop the mutability preference.
2128 if mt.mutbl == hir::MutImmutable {
2129 lvalue_pref = NoPreference;
2132 None => return (resolved_t, autoderefs, None)
2136 // We've reached the recursion limit, error gracefully.
2137 span_err!(fcx.tcx().sess, sp, E0055,
2138 "reached the recursion limit while auto-dereferencing {:?}",
2140 (fcx.tcx().types.err, 0, None)
// NOTE(review): non-contiguous listing; lines elided between entries.
// Attempts an overloaded deref of `base_ty` via the Deref/DerefMut lang
// items, returning the pointee type+mutability on success.
2143 fn try_overloaded_deref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2145 method_call: Option<MethodCall>,
2146 base_expr: Option<&hir::Expr>,
2148 lvalue_pref: LvaluePreference)
2149 -> Option<ty::TypeAndMut<'tcx>>
2151 // Try DerefMut first, if preferred.
2152 let method = match (lvalue_pref, fcx.tcx().lang_items.deref_mut_trait()) {
2153 (PreferMutLvalue, Some(trait_did)) => {
2154 method::lookup_in_trait(fcx, span, base_expr,
2155 token::intern("deref_mut"), trait_did,
2161 // Otherwise, fall back to Deref.
2162 let method = match (method, fcx.tcx().lang_items.deref_trait()) {
2163 (None, Some(trait_did)) => {
2164 method::lookup_in_trait(fcx, span, base_expr,
2165 token::intern("deref"), trait_did,
2168 (method, _) => method
// Peel `&T` off the method's return type and record the callee.
2171 make_overloaded_lvalue_return_type(fcx, method_call, method)
// NOTE(review): non-contiguous listing; lines elided between entries.
2174 /// For the overloaded lvalue expressions (`*x`, `x[3]`), the trait returns a type of `&T`, but the
2175 /// actual type we assign to the *expression* is `T`. So this function just peels off the return
2176 /// type by one layer to yield `T`. It also inserts the `method-callee` into the method map.
2177 fn make_overloaded_lvalue_return_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2178 method_call: Option<MethodCall>,
2179 method: Option<MethodCallee<'tcx>>)
2180 -> Option<ty::TypeAndMut<'tcx>>
2184 // extract method return type, which will be &T;
2185 // all LB regions should have been instantiated during method lookup
2186 let ret_ty = method.ty.fn_ret();
// First unwrap: no late-bound regions remain; second: fn converges.
2187 let ret_ty = fcx.tcx().no_late_bound_regions(&ret_ty).unwrap().unwrap();
2189 if let Some(method_call) = method_call {
2190 fcx.inh.tables.borrow_mut().method_map.insert(method_call, method);
2193 // method returns &T, but the type as visible to user is T, so deref
2194 ret_ty.builtin_deref(true, NoPreference)
// NOTE(review): non-contiguous listing; lines elided between entries.
// Type-checks `base_expr[idx]`: autoderefs the base, trying an index step
// at each level, with a final array-to-slice unsizing attempt.
2200 fn lookup_indexing<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2202 base_expr: &'tcx hir::Expr,
2205 lvalue_pref: LvaluePreference)
2206 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2208 // FIXME(#18741) -- this is almost but not quite the same as the
2209 // autoderef that normal method probing does. They could likely be
2212 let (ty, autoderefs, final_mt) = autoderef(fcx,
2216 UnresolvedTypeAction::Error,
2219 try_index_step(fcx, MethodCall::expr(expr.id), expr, base_expr,
2220 adj_ty, idx, false, lvalue_pref, idx_ty)
2223 if final_mt.is_some() {
2227 // After we have fully autoderef'd, if the resulting type is [T; n], then
2228 // do a final unsized coercion to yield [T].
2229 if let ty::TyArray(element_ty, _) = ty.sty {
2230 let adjusted_ty = fcx.tcx().mk_slice(element_ty);
2231 try_index_step(fcx, MethodCall::expr(expr.id), expr, base_expr,
2232 adjusted_ty, autoderefs, true, lvalue_pref, idx_ty)
// NOTE(review): non-contiguous listing; lines elided between entries.
2238 /// To type-check `base_expr[index_expr]`, we progressively autoderef (and otherwise adjust)
2239 /// `base_expr`, looking for a type which either supports builtin indexing or overloaded indexing.
2240 /// This loop implements one step in that search; the autoderef loop is implemented by
2241 /// `lookup_indexing`.
2242 fn try_index_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2243 method_call: MethodCall,
2245 base_expr: &'tcx hir::Expr,
2246 adjusted_ty: Ty<'tcx>,
2249 lvalue_pref: LvaluePreference,
2251 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2253 let tcx = fcx.tcx();
2254 debug!("try_index_step(expr={:?}, base_expr.id={:?}, adjusted_ty={:?}, \
2255 autoderefs={}, unsize={}, index_ty={:?})",
// Fresh variable standing in for the Index trait's input type parameter.
2263 let input_ty = fcx.infcx().next_ty_var();
2265 // First, try built-in indexing.
2266 match (adjusted_ty.builtin_index(), &index_ty.sty) {
2267 (Some(ty), &ty::TyUint(ast::TyUs)) | (Some(ty), &ty::TyInfer(ty::IntVar(_))) => {
2268 debug!("try_index_step: success, using built-in indexing");
2269 // If we had `[T; N]`, we should've caught it before unsizing to `[T]`.
2271 fcx.write_autoderef_adjustment(base_expr.id, autoderefs);
2272 return Some((tcx.types.usize, ty));
2277 // Try `IndexMut` first, if preferred.
2278 let method = match (lvalue_pref, tcx.lang_items.index_mut_trait()) {
2279 (PreferMutLvalue, Some(trait_did)) => {
2280 method::lookup_in_trait_adjusted(fcx,
2283 token::intern("index_mut"),
2288 Some(vec![input_ty]))
2293 // Otherwise, fall back to `Index`.
2294 let method = match (method, tcx.lang_items.index_trait()) {
2295 (None, Some(trait_did)) => {
2296 method::lookup_in_trait_adjusted(fcx,
2299 token::intern("index"),
2304 Some(vec![input_ty]))
2306 (method, _) => method,
2309 // If some lookup succeeds, write callee into table and extract index/element
2310 // type from the method signature.
2311 // If some lookup succeeded, install method in table
2312 method.and_then(|method| {
2313 debug!("try_index_step: success, using overloaded indexing");
2314 make_overloaded_lvalue_return_type(fcx, Some(method_call), Some(method)).
2315 map(|ret| (input_ty, ret.ty))
// NOTE(review): sampled listing — lines are elided between numbered rows.
//
// Checks the (non-receiver) arguments of a method call against the method's
// fn type and returns the method's output type. If the method type already
// contains errors, the args are checked against error placeholder types so
// type checking can continue, and an error-converging output is returned.
2319 fn check_method_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2321 method_fn_ty: Ty<'tcx>,
2322 callee_expr: &'tcx hir::Expr,
2323 args_no_rcvr: &'tcx [P<hir::Expr>],
2324 tuple_arguments: TupleArgumentsFlag,
2325 expected: Expectation<'tcx>)
2326 -> ty::FnOutput<'tcx> {
2327 if method_fn_ty.references_error() {
// One error type per supplied argument, so each arg still gets checked.
2328 let err_inputs = err_args(fcx.tcx(), args_no_rcvr.len());
2330 let err_inputs = match tuple_arguments {
2331 DontTupleArguments => err_inputs,
2332 TupleArguments => vec![fcx.tcx().mk_tup(err_inputs)],
2335 check_argument_types(fcx,
2342 ty::FnConverging(fcx.tcx().types.err)
2344 match method_fn_ty.sty {
2345 ty::TyBareFn(_, ref fty) => {
2346 // HACK(eddyb) ignore self in the definition (see above).
// `inputs[0]` is the receiver; only `inputs[1..]` correspond to
// `args_no_rcvr`, both for expectations and for checking.
2347 let expected_arg_tys = expected_types_for_fn_args(fcx,
2351 &fty.sig.0.inputs[1..]);
2352 check_argument_types(fcx,
2354 &fty.sig.0.inputs[1..],
2355 &expected_arg_tys[..],
2362 fcx.tcx().sess.span_bug(callee_expr.span,
2363 "method without bare fn type");
// NOTE(review): sampled listing — embedded line numbers jump; statements and
// match arms between the numbered rows are elided.
2369 /// Generic function that factors out common logic from function calls, method calls and overloaded
2371 fn check_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2373 fn_inputs: &[Ty<'tcx>],
2374 expected_arg_tys: &[Ty<'tcx>],
2375 args: &'tcx [P<hir::Expr>],
2377 tuple_arguments: TupleArgumentsFlag) {
2378 let tcx = fcx.ccx.tcx;
2380 // Grab the argument types, supplying fresh type variables
2381 // if the wrong number of arguments were supplied
2382 let supplied_arg_count = if tuple_arguments == DontTupleArguments {
2388 // All the input types from the fn signature must outlive the call
2389 // so as to validate implied bounds.
2390 for &fn_input_ty in fn_inputs {
2391 fcx.register_wf_obligation(fn_input_ty, sp, traits::MiscObligation);
2394 let mut expected_arg_tys = expected_arg_tys;
2395 let expected_arg_count = fn_inputs.len();
// Compute the formal parameter types, diagnosing arity mismatches. For
// `TupleArguments` (the `Fn*` call sugar) the single formal must be a tuple
// whose arity matches the call-site argument count.
2396 let formal_tys = if tuple_arguments == TupleArguments {
2397 let tuple_type = structurally_resolved_type(fcx, sp, fn_inputs[0]);
2398 match tuple_type.sty {
2399 ty::TyTuple(ref arg_types) => {
2400 if arg_types.len() != args.len() {
2401 span_err!(tcx.sess, sp, E0057,
2402 "this function takes {} parameter{} but {} parameter{} supplied",
2404 if arg_types.len() == 1 {""} else {"s"},
2406 if args.len() == 1 {" was"} else {"s were"});
2407 expected_arg_tys = &[];
2408 err_args(fcx.tcx(), args.len())
// Arity matches: unpack a tuple expectation into per-arg expectations.
2410 expected_arg_tys = match expected_arg_tys.get(0) {
2411 Some(&ty) => match ty.sty {
2412 ty::TyTuple(ref tys) => &**tys,
2417 (*arg_types).clone()
2421 span_err!(tcx.sess, sp, E0059,
2422 "cannot use call notation; the first type parameter \
2423 for the function trait is neither a tuple nor unit");
2424 expected_arg_tys = &[];
2425 err_args(fcx.tcx(), args.len())
2428 } else if expected_arg_count == supplied_arg_count {
// Variadic fns accept at least `expected_arg_count` args; fewer is E0060.
2430 } else if variadic {
2431 if supplied_arg_count >= expected_arg_count {
2434 span_err!(tcx.sess, sp, E0060,
2435 "this function takes at least {} parameter{} \
2436 but {} parameter{} supplied",
2438 if expected_arg_count == 1 {""} else {"s"},
2440 if supplied_arg_count == 1 {" was"} else {"s were"});
2441 expected_arg_tys = &[];
2442 err_args(fcx.tcx(), supplied_arg_count)
2445 span_err!(tcx.sess, sp, E0061,
2446 "this function takes {} parameter{} but {} parameter{} supplied",
2448 if expected_arg_count == 1 {""} else {"s"},
2450 if supplied_arg_count == 1 {" was"} else {"s were"});
2451 expected_arg_tys = &[];
2452 err_args(fcx.tcx(), supplied_arg_count)
2455 debug!("check_argument_types: formal_tys={:?}",
2456 formal_tys.iter().map(|t| fcx.infcx().ty_to_string(*t)).collect::<Vec<String>>());
2458 // Check the arguments.
2459 // We do this in a pretty awful way: first we typecheck any arguments
2460 // that are not anonymous functions, then we typecheck the anonymous
2461 // functions. This is so that we have more information about the types
2462 // of arguments when we typecheck the functions. This isn't really the
2463 // right way to do this.
// Two passes: pass 0 checks non-closure args, pass 1 checks closures.
2464 let xs = [false, true];
2465 let mut any_diverges = false; // has any of the arguments diverged?
2466 let mut warned = false; // have we already warned about unreachable code?
2467 for check_blocks in &xs {
2468 let check_blocks = *check_blocks;
2469 debug!("check_blocks={}", check_blocks);
2471 // More awful hacks: before we check argument types, try to do
2472 // an "opportunistic" vtable resolution of any trait bounds on
2473 // the call. This helps coercions.
2475 fcx.select_obligations_where_possible();
2478 // For variadic functions, we don't have a declared type for all of
2479 // the arguments hence we only do our usual type checking with
2480 // the arguments who's types we do know.
2481 let t = if variadic {
2483 } else if tuple_arguments == TupleArguments {
2488 for (i, arg) in args.iter().take(t).enumerate() {
// Lint (once) if an earlier argument's type diverged, making this
// argument unreachable at runtime.
2489 if any_diverges && !warned {
2493 .add_lint(lint::builtin::UNREACHABLE_CODE,
2496 "unreachable expression".to_string());
2499 let is_block = match arg.node {
2500 hir::ExprClosure(..) => true,
2504 if is_block == check_blocks {
2505 debug!("checking the argument");
2506 let formal_ty = formal_tys[i];
2508 // The special-cased logic below has three functions:
2509 // 1. Provide as good of an expected type as possible.
2510 let expected = expected_arg_tys.get(i).map(|&ty| {
2511 Expectation::rvalue_hint(fcx.tcx(), ty)
2514 check_expr_with_unifier(fcx,
2516 expected.unwrap_or(ExpectHasType(formal_ty)),
2518 // 2. Coerce to the most detailed type that could be coerced
2519 // to, which is `expected_ty` if `rvalue_hint` returns an
2520 // `ExprHasType(expected_ty)`, or the `formal_ty` otherwise.
2521 let coerce_ty = expected.and_then(|e| e.only_has_type(fcx));
2522 demand::coerce(fcx, arg.span, coerce_ty.unwrap_or(formal_ty), &**arg);
2524 // 3. Relate the expected type and the formal one,
2525 // if the expected type was used for the coercion.
2526 coerce_ty.map(|ty| demand::suptype(fcx, arg.span, formal_ty, ty));
2530 if let Some(&arg_ty) = fcx.inh.tables.borrow().node_types.get(&arg.id) {
2531 any_diverges = any_diverges || fcx.infcx().type_var_diverges(arg_ty);
2534 if any_diverges && !warned {
2535 let parent = fcx.ccx.tcx.map.get_parent_node(args[0].id);
2539 .add_lint(lint::builtin::UNREACHABLE_CODE,
2542 "unreachable call".to_string());
2548 // We also need to make sure we at least write the ty of the other
2549 // arguments which we skipped above.
// Variadic tail: check each extra arg and reject C-style default argument
// promotions, requiring explicit casts instead.
2551 for arg in args.iter().skip(expected_arg_count) {
2552 check_expr(fcx, &**arg);
2554 // There are a few types which get autopromoted when passed via varargs
2555 // in C but we just error out instead and require explicit casts.
2556 let arg_ty = structurally_resolved_type(fcx, arg.span,
2557 fcx.expr_ty(&**arg));
2559 ty::TyFloat(ast::TyF32) => {
2560 fcx.type_error_message(arg.span,
2562 format!("can't pass an {} to variadic \
2563 function, cast to c_double", t)
2566 ty::TyInt(ast::TyI8) | ty::TyInt(ast::TyI16) | ty::TyBool => {
2567 fcx.type_error_message(arg.span, |t| {
2568 format!("can't pass {} to variadic \
2569 function, cast to c_int",
2573 ty::TyUint(ast::TyU8) | ty::TyUint(ast::TyU16) => {
2574 fcx.type_error_message(arg.span, |t| {
2575 format!("can't pass {} to variadic \
2576 function, cast to c_uint",
// Builds `len` copies of the error type, used as placeholder formal
// parameter types after an arity or call-shape error so that argument
// checking can proceed. (Closing brace elided in this sampled listing.)
2586 // FIXME(#17596) Ty<'tcx> is incorrectly invariant w.r.t 'tcx.
2587 fn err_args<'tcx>(tcx: &ty::ctxt<'tcx>, len: usize) -> Vec<Ty<'tcx>> {
2588 (0..len).map(|_| tcx.types.err).collect()
// Records the type of a call expression: the callee's return type when it
// converges, or a fresh diverging type variable for a `-> !` callee.
// (Closing lines elided in this sampled listing.)
2591 fn write_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2592 call_expr: &hir::Expr,
2593 output: ty::FnOutput<'tcx>) {
2594 fcx.write_ty(call_expr.id, match output {
2595 ty::FnConverging(output_ty) => output_ty,
2596 ty::FnDiverging => fcx.infcx().next_diverging_ty_var()
// NOTE(review): sampled listing — lines are elided between numbered rows.
2600 // AST fragment checking
//
// Computes the type of a literal expression. Unsuffixed integer and float
// literals consult the expectation to pick a concrete type; failing that,
// a fresh int/float inference variable is created.
2601 fn check_lit<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2603 expected: Expectation<'tcx>)
2606 let tcx = fcx.ccx.tcx;
2609 ast::LitStr(..) => tcx.mk_static_str(),
// Byte-string literals are `&'static [u8; len]`.
2610 ast::LitByteStr(ref v) => {
2611 tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic),
2612 tcx.mk_array(tcx.types.u8, v.len()))
2614 ast::LitByte(_) => tcx.types.u8,
2615 ast::LitChar(_) => tcx.types.char,
2616 ast::LitInt(_, ast::SignedIntLit(t, _)) => tcx.mk_mach_int(t),
2617 ast::LitInt(_, ast::UnsignedIntLit(t)) => tcx.mk_mach_uint(t),
2618 ast::LitInt(_, ast::UnsuffixedIntLit(_)) => {
// Expected char/raw-pointer/fn types steer an unsuffixed integer
// toward u8/usize respectively (these appear in cast positions).
2619 let opt_ty = expected.to_option(fcx).and_then(|ty| {
2621 ty::TyInt(_) | ty::TyUint(_) => Some(ty),
2622 ty::TyChar => Some(tcx.types.u8),
2623 ty::TyRawPtr(..) => Some(tcx.types.usize),
2624 ty::TyBareFn(..) => Some(tcx.types.usize),
2628 opt_ty.unwrap_or_else(
2629 || tcx.mk_int_var(fcx.infcx().next_int_var_id()))
2631 ast::LitFloat(_, t) => tcx.mk_mach_float(t),
2632 ast::LitFloatUnsuffixed(_) => {
2633 let opt_ty = expected.to_option(fcx).and_then(|ty| {
2635 ty::TyFloat(_) => Some(ty),
2639 opt_ty.unwrap_or_else(
2640 || tcx.mk_float_var(fcx.infcx().next_float_var_id()))
2642 ast::LitBool(_) => tcx.types.bool
// Checks `expr` and demands its type be exactly equal to `expected`
// (equality, not subtyping or coercion).
2646 fn check_expr_eq_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2647 expr: &'tcx hir::Expr,
2648 expected: Ty<'tcx>) {
2649 check_expr_with_unifier(
2650 fcx, expr, ExpectHasType(expected), NoPreference,
2651 || demand::eqtype(fcx, expr.span, expected, fcx.expr_ty(expr)));
// Checks `expr` and demands its type be a subtype of `expected`
// (subtyping, but no coercion).
2654 pub fn check_expr_has_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2655 expr: &'tcx hir::Expr,
2656 expected: Ty<'tcx>) {
2657 check_expr_with_unifier(
2658 fcx, expr, ExpectHasType(expected), NoPreference,
2659 || demand::suptype(fcx, expr.span, expected, fcx.expr_ty(expr)));
// Checks `expr` and demands that it be coercible to `expected`
// (allows implicit coercions such as autoderef/unsizing).
2662 fn check_expr_coercable_to_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2663 expr: &'tcx hir::Expr,
2664 expected: Ty<'tcx>) {
2665 check_expr_with_unifier(
2666 fcx, expr, ExpectHasType(expected), NoPreference,
2667 || demand::coerce(fcx, expr.span, expected, expr));
// Checks `expr` with `expected` as a hint only — no demand is placed on
// the resulting type. (Unifier closure elided in this sampled listing.)
2670 fn check_expr_with_hint<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, expr: &'tcx hir::Expr,
2671 expected: Ty<'tcx>) {
2672 check_expr_with_unifier(
2673 fcx, expr, ExpectHasType(expected), NoPreference,
// Checks `expr` against an arbitrary `Expectation`, with no lvalue
// preference and no extra unification demand.
2677 fn check_expr_with_expectation<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2678 expr: &'tcx hir::Expr,
2679 expected: Expectation<'tcx>) {
2680 check_expr_with_unifier(
2681 fcx, expr, expected, NoPreference,
// Checks `expr` with both an expectation and an lvalue preference
// (the latter matters for choosing `Deref` vs `DerefMut`, etc.).
2685 fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2686 expr: &'tcx hir::Expr,
2687 expected: Expectation<'tcx>,
2688 lvalue_pref: LvaluePreference)
2690 check_expr_with_unifier(fcx, expr, expected, lvalue_pref, || ())
// Checks `expr` with no expectation and no lvalue preference.
2693 fn check_expr<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, expr: &'tcx hir::Expr) {
2694 check_expr_with_unifier(fcx, expr, NoExpectation, NoPreference, || ())
// Checks `expr` with no expectation but a caller-supplied lvalue preference.
2697 fn check_expr_with_lvalue_pref<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, expr: &'tcx hir::Expr,
2698 lvalue_pref: LvaluePreference) {
2699 check_expr_with_unifier(fcx, expr, NoExpectation, lvalue_pref, || ())
// NOTE(review): sampled listing — lines are elided between numbered rows.
2702 // determine the `self` type, using fresh variables for all variables
2703 // declared on the impl declaration e.g., `impl<A,B> for Vec<(A,B)>`
2704 // would return ($0, $1) where $0 and $1 are freshly instantiated type
2706 pub fn impl_self_ty<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2707 span: Span, // (potential) receiver for this impl
2709 -> TypeAndSubsts<'tcx> {
2710 let tcx = fcx.tcx();
2712 let ity = tcx.lookup_item_type(did);
// Pull the impl's type and region parameter definitions (all live in
// TypeSpace) plus its raw self type.
2713 let (tps, rps, raw_ty) =
2714 (ity.generics.types.get_slice(subst::TypeSpace),
2715 ity.generics.regions.get_slice(subst::TypeSpace),
2718 debug!("impl_self_ty: tps={:?} rps={:?} raw_ty={:?}", tps, rps, raw_ty);
// Fresh region variables first, then fresh type variables are pushed into
// the same substitution; finally substitute into the raw self type.
2720 let rps = fcx.inh.infcx.region_vars_for_defs(span, rps);
2721 let mut substs = subst::Substs::new(
2722 VecPerParamSpace::empty(),
2723 VecPerParamSpace::new(rps, Vec::new(), Vec::new()));
2724 fcx.inh.infcx.type_vars_for_defs(span, ParamSpace::TypeSpace, &mut substs, tps);
2725 let substd_ty = fcx.instantiate_type_scheme(span, &substs, &raw_ty);
2727 TypeAndSubsts { substs: substs, ty: substd_ty }
// NOTE(review): the enum's variants (DontTupleArguments / TupleArguments,
// used throughout this file) are on lines elided from this sampled listing.
2730 /// Controls whether the arguments are tupled. This is used for the call
2733 /// Tupling means that all call-side arguments are packed into a tuple and
2734 /// passed as a single parameter. For example, if tupling is enabled, this
2737 ///     fn f(x: (isize, isize))
2739 /// Can be called as:
2746 #[derive(Clone, Eq, PartialEq)]
2747 enum TupleArgumentsFlag {
// NOTE(review): sampled listing — lines are elided between numbered rows.
2752 /// Unifies the return type with the expected type early, for more coercions
2753 /// and forward type information on the argument expressions.
2754 fn expected_types_for_fn_args<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2756 expected_ret: Expectation<'tcx>,
2757 formal_ret: ty::FnOutput<'tcx>,
2758 formal_args: &[Ty<'tcx>])
2760 let expected_args = expected_ret.only_has_type(fcx).and_then(|ret_ty| {
2761 if let ty::FnConverging(formal_ret_ty) = formal_ret {
// Speculative: region constraints are rolled back unless the subtyping
// below succeeds.
2762 fcx.infcx().commit_regions_if_ok(|| {
2763 // Attempt to apply a subtyping relationship between the formal
2764 // return type (likely containing type variables if the function
2765 // is polymorphic) and the expected return type.
2766 // No argument expectations are produced if unification fails.
2767 let origin = TypeOrigin::Misc(call_span);
2768 let ures = fcx.infcx().sub_types(false, origin, formal_ret_ty, ret_ty);
2769 // FIXME(#15760) can't use try! here, FromError doesn't default
2770 // to identity so the resulting type is not constrained.
2771 if let Err(e) = ures {
2775 // Record all the argument types, with the substitutions
2776 // produced from the above subtyping unification.
2777 Ok(formal_args.iter().map(|ty| {
2778 fcx.infcx().resolve_type_vars_if_possible(ty)
// No usable expectation (or unification failed): no per-arg expectations.
2784 }).unwrap_or(vec![]);
2785 debug!("expected_types_for_fn_args(formal={:?} -> {:?}, expected={:?} -> {:?})",
2786 formal_args, formal_ret,
2787 expected_args, expected_ret);
2792 /// If an expression has any sub-expressions that result in a type error,
2793 /// inspecting that expression's type with `ty.references_error()` will return
2794 /// true. Likewise, if an expression is known to diverge, inspecting its
2795 /// type with `ty::type_is_bot` will return true (n.b.: since Rust is
2796 /// strict, _|_ can appear in the type of an expression that does not,
2797 /// itself, diverge: for example, fn() -> _|_.)
2798 /// Note that inspecting a type's structure *directly* may expose the fact
2799 /// that there are actually multiple representations for `TyError`, so avoid
2800 /// that when err needs to be handled differently.
2801 fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
2802 expr: &'tcx hir::Expr,
2803 expected: Expectation<'tcx>,
2804 lvalue_pref: LvaluePreference,
2808 debug!(">> typechecking: expr={:?} expected={:?}",
// NOTE(review): sampled listing — lines are elided between numbered rows.
// (Nested helper inside `check_expr_with_unifier`.)
2811 // Checks a method call.
2812 fn check_method_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2813 expr: &'tcx hir::Expr,
2814 method_name: Spanned<ast::Name>,
2815 args: &'tcx [P<hir::Expr>],
2817 expected: Expectation<'tcx>,
2818 lvalue_pref: LvaluePreference) {
// `args[0]` is the receiver; the remaining entries are the actual args.
2819 let rcvr = &*args[0];
2820 check_expr_with_lvalue_pref(fcx, &*rcvr, lvalue_pref);
2822 // no need to check for bot/err -- callee does that
2823 let expr_t = structurally_resolved_type(fcx,
2825 fcx.expr_ty(&*rcvr));
// Explicit method type arguments, e.g. `x.foo::<T>()`.
2827 let tps = tps.iter().map(|ast_ty| fcx.to_ty(&**ast_ty)).collect::<Vec<_>>();
2828 let fn_ty = match method::lookup(fcx,
2836 let method_ty = method.ty;
2837 let method_call = MethodCall::expr(expr.id);
2838 fcx.inh.tables.borrow_mut().method_map.insert(method_call, method);
// Lookup failure path: report the error and record an error type so
// checking can continue.
2842 method::report_error(fcx, method_name.span, expr_t,
2843 method_name.node, Some(rcvr), error);
2844 fcx.write_error(expr.id);
2849 // Call the generic checker.
2850 let ret_ty = check_method_argument_types(fcx,
2858 write_call(fcx, expr, ret_ty);
// NOTE(review): sampled listing — lines are elided between numbered rows.
// (Nested helper inside `check_expr_with_unifier`.)
2861 // A generic function for checking the then and else in an if
2863 fn check_then_else<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2864 cond_expr: &'tcx hir::Expr,
2865 then_blk: &'tcx hir::Block,
2866 opt_else_expr: Option<&'tcx hir::Expr>,
2869 expected: Expectation<'tcx>) {
// The condition must be `bool` (no truthiness in Rust).
2870 check_expr_has_type(fcx, cond_expr, fcx.tcx().types.bool);
2872 let expected = expected.adjust_for_branches(fcx);
2873 check_block_with_expected(fcx, then_blk, expected);
2874 let then_ty = fcx.node_ty(then_blk.id);
// With an else branch, the if's type is the common supertype of both
// arms; without one, it unifies with `()` (IfExpressionWithNoElse).
2876 let branches_ty = match opt_else_expr {
2877 Some(ref else_expr) => {
2878 check_expr_with_expectation(fcx, &**else_expr, expected);
2879 let else_ty = fcx.expr_ty(&**else_expr);
2880 infer::common_supertype(fcx.infcx(),
2881 TypeOrigin::IfExpression(sp),
2887 infer::common_supertype(fcx.infcx(),
2888 TypeOrigin::IfExpressionWithNoElse(sp),
2895 let cond_ty = fcx.expr_ty(cond_expr);
// A type error in the condition poisons the whole `if` expression.
2896 let if_ty = if cond_ty.references_error() {
2902 fcx.write_ty(id, if_ty);
// NOTE(review): sampled listing — lines are elided between numbered rows.
// (Nested helper inside `check_expr_with_unifier`.)
2905 // Check field access expressions
2906 fn check_field<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
2907 expr: &'tcx hir::Expr,
2908 lvalue_pref: LvaluePreference,
2909 base: &'tcx hir::Expr,
2910 field: &Spanned<ast::Name>) {
2911 check_expr_with_lvalue_pref(fcx, base, lvalue_pref);
2912 let expr_t = structurally_resolved_type(fcx, expr.span,
2914 // FIXME(eddyb) #12808 Integrate privacy into this auto-deref loop.
// Autoderef the base, looking for a struct with the named field at each
// deref level.
2915 let (_, autoderefs, field_ty) = autoderef(fcx,
2919 UnresolvedTypeAction::Error,
2923 ty::TyStruct(base_def, substs) => {
2924 debug!("struct named {:?}",  base_t);
2925 base_def.struct_variant()
2926 .find_field_named(field.node)
2927 .map(|f| fcx.field_ty(expr.span, f, substs))
// Success: record the field type and the autoderef adjustment on the base.
2934 fcx.write_ty(expr.id, field_ty);
2935 fcx.write_autoderef_adjustment(base.id, autoderefs);
// Failure diagnostics: if a method of that name exists, suggest calling it;
// otherwise report a missing field and suggest near-miss field names.
2941 if method::exists(fcx, field.span, field.node, expr_t, expr.id) {
2942 fcx.type_error_struct(field.span,
2944 format!("attempted to take value of method `{}` on type \
2945 `{}`", field.node, actual)
2948 .fileline_help(field.span,
2949 "maybe a `()` to call it is missing? \
2950 If not, try an anonymous function")
2953 let mut err = fcx.type_error_struct(
2956 format!("attempted access of field `{}` on \
2957 type `{}`, but no field with that \
2963 if let ty::TyStruct(def, _) = expr_t.sty {
2964 suggest_field_names(&mut err, def.struct_variant(), field, vec![]);
2969 fcx.write_error(expr.id);
// NOTE(review): sampled listing — lines are elided between numbered rows.
// (Nested helper inside `check_expr_with_unifier`.)
2972 // displays hints about the closest matches in field names
2973 fn suggest_field_names<'tcx>(err: &mut DiagnosticBuilder,
2974 variant: ty::VariantDef<'tcx>,
2975 field: &Spanned<ast::Name>,
2976 skip : Vec<InternedString>) {
2977 let name = field.node.as_str();
// Candidate pool: the variant's fields, minus explicitly-skipped names and
// private fields from other crates.
2978 let names = variant.fields
2980 .filter_map(|ref field| {
2981 // ignore already set fields and private fields from non-local crates
2982 if skip.iter().any(|x| *x == field.name.as_str()) ||
2983 (variant.did.krate != LOCAL_CRATE && field.vis != Visibility::Public) {
2990 // only find fits with at least one matching letter
2991 if let Some(name) = find_best_match_for_name(names, &name, Some(name.len())) {
2992 err.span_help(field.span,
2993 &format!("did you mean `{}`?", name));
// NOTE(review): sampled listing — lines are elided between numbered rows.
// (Nested helper inside `check_expr_with_unifier`.)
2997 // Check tuple index expressions
2998 fn check_tup_field<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
2999 expr: &'tcx hir::Expr,
3000 lvalue_pref: LvaluePreference,
3001 base: &'tcx hir::Expr,
3002 idx: codemap::Spanned<usize>) {
3003 check_expr_with_lvalue_pref(fcx, base, lvalue_pref);
3004 let expr_t = structurally_resolved_type(fcx, expr.span,
// Distinguishes the two failure diagnostics below: out-of-bounds index on a
// tuple(-struct) vs. indexing a non-tuple type.
3006 let mut tuple_like = false;
3007 // FIXME(eddyb) #12808 Integrate privacy into this auto-deref loop.
// Autoderef, accepting either a tuple struct or a plain tuple at each level.
3008 let (_, autoderefs, field_ty) = autoderef(fcx,
3012 UnresolvedTypeAction::Error,
3016 ty::TyStruct(base_def, substs) => {
3017 tuple_like = base_def.struct_variant().is_tuple_struct();
3019 debug!("tuple struct named {:?}",  base_t);
3020 base_def.struct_variant()
3023 .map(|f| fcx.field_ty(expr.span, f, substs))
3028 ty::TyTuple(ref v) => {
3030 if idx.node < v.len() { Some(v[idx.node]) } else { None }
// Success: record the element type and the autoderef adjustment.
3037 fcx.write_ty(expr.id, field_ty);
3038 fcx.write_autoderef_adjustment(base.id, autoderefs);
3043 fcx.type_error_message(
3047 format!("attempted out-of-bounds tuple index `{}` on \
3052 format!("attempted tuple index `{}` on type `{}`, but the \
3053 type was not a tuple or tuple struct",
3060 fcx.write_error(expr.id);
// NOTE(review): sampled listing — lines are elided between numbered rows.
// (Nested helper inside `check_expr_with_unifier`.)
//
// Reports an unknown field in a struct literal, wording the message
// differently for enum struct-variants vs. plain structs, and suggests
// close field-name matches, excluding fields already written.
3063 fn report_unknown_field<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
3065 variant: ty::VariantDef<'tcx>,
3067 skip_fields: &[hir::Field]) {
3068 let mut err = fcx.type_error_struct(
3070 |actual| if let ty::TyEnum(..) = ty.sty {
3071 format!("struct variant `{}::{}` has no field named `{}`",
3072 actual, variant.name.as_str(), field.name.node)
3074 format!("structure `{}` has no field named `{}`",
3075 actual, field.name.node)
3079 // prevent all specified fields from being suggested
3080 let skip_fields = skip_fields.iter().map(|ref x| x.name.node.as_str());
3081 suggest_field_names(&mut err, variant, &field.name, skip_fields.collect());
// NOTE(review): sampled listing — lines are elided between numbered rows.
// (Nested helper inside `check_expr_with_unifier`.)
//
// Type-checks the fields of a struct (or enum struct-variant) literal:
// each written field against its declared type, duplicate fields (E0062),
// unknown fields, and — when `check_completeness` (no `..base`) — any
// missing fields (E0063).
3085 fn check_expr_struct_fields<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
3088 variant: ty::VariantDef<'tcx>,
3089 ast_fields: &'tcx [hir::Field],
3090 check_completeness: bool) {
3091 let tcx = fcx.ccx.tcx;
3092 let substs = match adt_ty.sty {
3093 ty::TyStruct(_, substs) | ty::TyEnum(_, substs) => substs,
3094 _ => tcx.sess.span_bug(span, "non-ADT passed to check_expr_struct_fields")
// Fields not yet seen; entries are removed as the literal supplies them.
3097 let mut remaining_fields = FnvHashMap();
3098 for field in &variant.fields {
3099 remaining_fields.insert(field.name, field);
3102 let mut error_happened = false;
3104 // Typecheck each field.
3105 for field in ast_fields {
3106 let expected_field_type;
3108 if let Some(v_field) = remaining_fields.remove(&field.name.node) {
3109 expected_field_type = fcx.field_ty(field.span, v_field, substs);
3111 error_happened = true;
3112 expected_field_type = tcx.types.err;
// If the name exists on the variant but was already removed from
// `remaining_fields`, it must be a duplicate.
3113 if let Some(_) = variant.find_field_named(field.name.node) {
3114 span_err!(fcx.tcx().sess, field.name.span, E0062,
3115 "field `{}` specified more than once",
3118 report_unknown_field(fcx, adt_ty, variant, field, ast_fields);
3122 // Make sure to give a type to the field even if there's
3123 // an error, so we can continue typechecking
3124 check_expr_coercable_to_type(fcx, &*field.expr, expected_field_type);
3127 // Make sure the programmer specified all the fields.
3128 if check_completeness &&
3130 !remaining_fields.is_empty()
3132 span_err!(tcx.sess, span, E0063,
3133 "missing field{} {} in initializer of `{}`",
3134 if remaining_fields.len() == 1 {""} else {"s"},
3135 remaining_fields.keys()
3136 .map(|n| format!("`{}`", n))
3137 .collect::<Vec<_>>()
// NOTE(review): sampled listing — lines are elided between numbered rows.
// (Nested helper inside `check_expr_with_unifier`.)
3144 fn check_struct_fields_on_error<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
3146 fields: &'tcx [hir::Field],
3147 base_expr: &'tcx Option<P<hir::Expr>>) {
3148 // Make sure to still write the types
3149 // otherwise we might ICE
// The struct expression itself gets the error type; its field expressions
// and optional `..base` are still checked so they receive types too.
3150 fcx.write_error(id);
3151 for field in fields {
3152 check_expr(fcx, &*field.expr);
3155 Some(ref base) => check_expr(fcx, &**base),
// NOTE(review): sampled listing — lines are elided between numbered rows.
// (Nested helper inside `check_expr_with_unifier`.)
//
// Type-checks a struct literal `Path { fields.. }`: resolves the path to a
// struct (or enum struct-variant), checks its fields, and validates the
// functional-record-update base (`..base`) when present.
3160 fn check_expr_struct<'a, 'tcx>(fcx: &FnCtxt<'a,'tcx>,
3163 fields: &'tcx [hir::Field],
3164 base_expr: &'tcx Option<P<hir::Expr>>)
3166 let tcx = fcx.tcx();
3168 // Find the relevant variant
3169 let def = lookup_full_def(tcx, path.span, expr.id);
// Resolution already failed: still type the sub-expressions, then bail.
3170 if def == def::DefErr {
3171 check_struct_fields_on_error(fcx, expr.id, fields, base_expr);
3174 let (adt, variant) = match fcx.def_struct_variant(def, path.span) {
3175 Some((adt, variant)) => (adt, variant),
3177 span_err!(fcx.tcx().sess, path.span, E0071,
3178 "`{}` does not name a structure",
3179 pprust::path_to_string(path));
3180 check_struct_fields_on_error(fcx, expr.id, fields, base_expr);
3185 let expr_ty = fcx.instantiate_type(def.def_id(), path);
3186 fcx.write_ty(expr.id, expr_ty);
// `..base` is only legal when no named fields are missing, hence
// `check_completeness = base_expr.is_none()`.
3188 check_expr_struct_fields(fcx, expr_ty, expr.span, variant, fields,
3189 base_expr.is_none());
3191 if let &Some(ref base_expr) = base_expr {
3192 check_expr_has_type(fcx, base_expr, expr_ty);
// Functional record update is struct-only; reject it on enum variants.
3193 if adt.adt_kind() == ty::AdtKind::Enum {
3194 span_err!(tcx.sess, base_expr.span, E0436,
3195 "functional record update syntax requires a struct");
3200 type ExprCheckerWithTy = fn(&FnCtxt, &hir::Expr, Ty);
3202 let tcx = fcx.ccx.tcx;
3205 hir::ExprBox(ref subexpr) => {
3206 let expected_inner = expected.to_option(fcx).map_or(NoExpectation, |ty| {
3208 ty::TyBox(ty) => Expectation::rvalue_hint(tcx, ty),
3212 check_expr_with_expectation(fcx, subexpr, expected_inner);
3213 let referent_ty = fcx.expr_ty(&**subexpr);
3214 fcx.write_ty(id, tcx.mk_box(referent_ty));
3217 hir::ExprLit(ref lit) => {
3218 let typ = check_lit(fcx, &**lit, expected);
3219 fcx.write_ty(id, typ);
3221 hir::ExprBinary(op, ref lhs, ref rhs) => {
3222 op::check_binop(fcx, expr, op, lhs, rhs);
3224 hir::ExprAssignOp(op, ref lhs, ref rhs) => {
3225 op::check_binop_assign(fcx, expr, op, lhs, rhs);
3227 hir::ExprUnary(unop, ref oprnd) => {
3228 let expected_inner = match unop {
3229 hir::UnNot | hir::UnNeg => {
3236 let lvalue_pref = match unop {
3237 hir::UnDeref => lvalue_pref,
3240 check_expr_with_expectation_and_lvalue_pref(
3241 fcx, &**oprnd, expected_inner, lvalue_pref);
3242 let mut oprnd_t = fcx.expr_ty(&**oprnd);
3244 if !oprnd_t.references_error() {
3247 oprnd_t = structurally_resolved_type(fcx, expr.span, oprnd_t);
3248 oprnd_t = match oprnd_t.builtin_deref(true, NoPreference) {
3250 None => match try_overloaded_deref(fcx, expr.span,
3251 Some(MethodCall::expr(expr.id)),
3252 Some(&**oprnd), oprnd_t, lvalue_pref) {
3255 fcx.type_error_message(expr.span, |actual| {
3256 format!("type `{}` cannot be \
3257 dereferenced", actual)
3265 oprnd_t = structurally_resolved_type(fcx, oprnd.span,
3267 if !(oprnd_t.is_integral() || oprnd_t.sty == ty::TyBool) {
3268 oprnd_t = op::check_user_unop(fcx, "!", "not",
3269 tcx.lang_items.not_trait(),
3270 expr, &**oprnd, oprnd_t, unop);
3274 oprnd_t = structurally_resolved_type(fcx, oprnd.span,
3276 if !(oprnd_t.is_integral() || oprnd_t.is_fp()) {
3277 oprnd_t = op::check_user_unop(fcx, "-", "neg",
3278 tcx.lang_items.neg_trait(),
3279 expr, &**oprnd, oprnd_t, unop);
3284 fcx.write_ty(id, oprnd_t);
3286 hir::ExprAddrOf(mutbl, ref oprnd) => {
3287 let hint = expected.only_has_type(fcx).map_or(NoExpectation, |ty| {
3289 ty::TyRef(_, ref mt) | ty::TyRawPtr(ref mt) => {
3290 if fcx.tcx().expr_is_lval(&**oprnd) {
3291 // Lvalues may legitimately have unsized types.
3292 // For example, dereferences of a fat pointer and
3293 // the last field of a struct can be unsized.
3294 ExpectHasType(mt.ty)
3296 Expectation::rvalue_hint(tcx, mt.ty)
3302 let lvalue_pref = LvaluePreference::from_mutbl(mutbl);
3303 check_expr_with_expectation_and_lvalue_pref(fcx,
3308 let tm = ty::TypeAndMut { ty: fcx.expr_ty(&**oprnd), mutbl: mutbl };
3309 let oprnd_t = if tm.ty.references_error() {
3312 // Note: at this point, we cannot say what the best lifetime
3313 // is to use for resulting pointer. We want to use the
3314 // shortest lifetime possible so as to avoid spurious borrowck
3315 // errors. Moreover, the longest lifetime will depend on the
3316 // precise details of the value whose address is being taken
3317 // (and how long it is valid), which we don't know yet until type
3318 // inference is complete.
3320 // Therefore, here we simply generate a region variable. The
3321 // region inferencer will then select the ultimate value.
3322 // Finally, borrowck is charged with guaranteeing that the
3323 // value whose address was taken can actually be made to live
3324 // as long as it needs to live.
3325 let region = fcx.infcx().next_region_var(infer::AddrOfRegion(expr.span));
3326 tcx.mk_ref(tcx.mk_region(region), tm)
3328 fcx.write_ty(id, oprnd_t);
3330 hir::ExprPath(ref maybe_qself, ref path) => {
3331 let opt_self_ty = maybe_qself.as_ref().map(|qself| {
3332 fcx.to_ty(&qself.ty)
3335 let path_res = if let Some(&d) = tcx.def_map.borrow().get(&id) {
3337 } else if let Some(hir::QSelf { position: 0, .. }) = *maybe_qself {
3338 // Create some fake resolution that can't possibly be a type.
3339 def::PathResolution {
3340 base_def: def::DefMod(tcx.map.local_def_id(ast::CRATE_NODE_ID)),
3341 last_private: LastMod(AllPublic),
3342 depth: path.segments.len()
3345 tcx.sess.span_bug(expr.span,
3346 &format!("unbound path {:?}", expr))
3349 if let Some((opt_ty, segments, def)) =
3350 resolve_ty_and_def_ufcs(fcx, path_res, opt_self_ty, path,
3351 expr.span, expr.id) {
3352 if def != def::DefErr {
3353 let (scheme, predicates) = type_scheme_and_predicates_for_def(fcx,
3356 instantiate_path(fcx,
3365 fcx.write_ty(id, fcx.tcx().types.err);
3369 // We always require that the type provided as the value for
3370 // a type parameter outlives the moment of instantiation.
3371 fcx.opt_node_ty_substs(expr.id, |item_substs| {
3372 fcx.add_wf_bounds(&item_substs.substs, expr);
3375 hir::ExprInlineAsm(ref ia) => {
3376 for &(_, ref input) in &ia.inputs {
3377 check_expr(fcx, &**input);
3379 for out in &ia.outputs {
3380 check_expr(fcx, &*out.expr);
3384 hir::ExprBreak(_) => { fcx.write_ty(id, fcx.infcx().next_diverging_ty_var()); }
3385 hir::ExprAgain(_) => { fcx.write_ty(id, fcx.infcx().next_diverging_ty_var()); }
3386 hir::ExprRet(ref expr_opt) => {
3388 ty::FnConverging(result_type) => {
3391 if let Err(_) = fcx.mk_eqty(false, TypeOrigin::Misc(expr.span),
3392 result_type, fcx.tcx().mk_nil()) {
3393 span_err!(tcx.sess, expr.span, E0069,
3394 "`return;` in a function whose return type is \
3398 check_expr_coercable_to_type(fcx, &**e, result_type);
3402 ty::FnDiverging => {
3403 if let Some(ref e) = *expr_opt {
3404 check_expr(fcx, &**e);
3406 span_err!(tcx.sess, expr.span, E0166,
3407 "`return` in a function declared as diverging");
3410 fcx.write_ty(id, fcx.infcx().next_diverging_ty_var());
3412 hir::ExprAssign(ref lhs, ref rhs) => {
3413 check_expr_with_lvalue_pref(fcx, &**lhs, PreferMutLvalue);
3415 let tcx = fcx.tcx();
3416 if !tcx.expr_is_lval(&**lhs) {
3417 span_err!(tcx.sess, expr.span, E0070,
3418 "invalid left-hand side expression");
3421 let lhs_ty = fcx.expr_ty(&**lhs);
3422 check_expr_coercable_to_type(fcx, &**rhs, lhs_ty);
3423 let rhs_ty = fcx.expr_ty(&**rhs);
3425 fcx.require_expr_have_sized_type(&**lhs, traits::AssignmentLhsSized);
3427 if lhs_ty.references_error() || rhs_ty.references_error() {
3428 fcx.write_error(id);
3433 hir::ExprIf(ref cond, ref then_blk, ref opt_else_expr) => {
3434 check_then_else(fcx, &**cond, &**then_blk, opt_else_expr.as_ref().map(|e| &**e),
3435 id, expr.span, expected);
3437 hir::ExprWhile(ref cond, ref body, _) => {
3438 check_expr_has_type(fcx, &**cond, tcx.types.bool);
3439 check_block_no_value(fcx, &**body);
3440 let cond_ty = fcx.expr_ty(&**cond);
3441 let body_ty = fcx.node_ty(body.id);
3442 if cond_ty.references_error() || body_ty.references_error() {
3443 fcx.write_error(id);
3449 hir::ExprLoop(ref body, _) => {
3450 check_block_no_value(fcx, &**body);
3451 if !may_break(tcx, expr.id, &**body) {
3452 fcx.write_ty(id, fcx.infcx().next_diverging_ty_var());
3457 hir::ExprMatch(ref discrim, ref arms, match_src) => {
3458 _match::check_match(fcx, expr, &**discrim, arms, expected, match_src);
3460 hir::ExprClosure(capture, ref decl, ref body) => {
3461 closure::check_expr_closure(fcx, expr, capture, &**decl, &**body, expected);
3463 hir::ExprBlock(ref b) => {
3464 check_block_with_expected(fcx, &**b, expected);
3465 fcx.write_ty(id, fcx.node_ty(b.id));
3467 hir::ExprCall(ref callee, ref args) => {
3468 callee::check_call(fcx, expr, &**callee, &args[..], expected);
3470 // we must check that return type of called functions is WF:
3471 let ret_ty = fcx.expr_ty(expr);
3472 fcx.register_wf_obligation(ret_ty, expr.span, traits::MiscObligation);
3474 hir::ExprMethodCall(name, ref tps, ref args) => {
3475 check_method_call(fcx, expr, name, &args[..], &tps[..], expected, lvalue_pref);
3476 let arg_tys = args.iter().map(|a| fcx.expr_ty(&**a));
3477 let args_err = arg_tys.fold(false, |rest_err, a| rest_err || a.references_error());
3479 fcx.write_error(id);
3482 hir::ExprCast(ref e, ref t) => {
3483 if let hir::TyFixedLengthVec(_, ref count_expr) = t.node {
3484 check_expr_with_hint(fcx, &**count_expr, tcx.types.usize);
3487 // Find the type of `e`. Supply hints based on the type we are casting to,
3489 let t_cast = fcx.to_ty(t);
3490 let t_cast = structurally_resolved_type(fcx, expr.span, t_cast);
3491 check_expr_with_expectation(fcx, e, ExpectCastableToType(t_cast));
3492 let t_expr = fcx.expr_ty(e);
3493 let t_cast = fcx.infcx().resolve_type_vars_if_possible(&t_cast);
3495 // Eagerly check for some obvious errors.
3496 if t_expr.references_error() || t_cast.references_error() {
3497 fcx.write_error(id);
3498 } else if !fcx.type_is_known_to_be_sized(t_cast, expr.span) {
3499 report_cast_to_unsized_type(fcx, expr.span, t.span, e.span, t_cast, t_expr, id);
3501 // Write a type for the whole expression, assuming everything is going
3503 fcx.write_ty(id, t_cast);
3505 // Defer other checks until we're done type checking.
3506 let mut deferred_cast_checks = fcx.inh.deferred_cast_checks.borrow_mut();
3507 let cast_check = cast::CastCheck::new((**e).clone(), t_expr, t_cast, expr.span);
3508 deferred_cast_checks.push(cast_check);
3511 hir::ExprType(ref e, ref t) => {
3512 let typ = fcx.to_ty(&**t);
3513 check_expr_eq_type(fcx, &**e, typ);
3514 fcx.write_ty(id, typ);
3516 hir::ExprVec(ref args) => {
3517 let uty = expected.to_option(fcx).and_then(|uty| {
3519 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3524 let typ = match uty {
3527 check_expr_coercable_to_type(fcx, &**e, uty);
3532 let t: Ty = fcx.infcx().next_ty_var();
3534 check_expr_has_type(fcx, &**e, t);
3539 let typ = tcx.mk_array(typ, args.len());
3540 fcx.write_ty(id, typ);
3542 hir::ExprRepeat(ref element, ref count_expr) => {
3543 check_expr_has_type(fcx, &**count_expr, tcx.types.usize);
3544 let count = fcx.tcx().eval_repeat_count(&**count_expr);
3546 let uty = match expected {
3547 ExpectHasType(uty) => {
3549 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3556 let (element_ty, t) = match uty {
3558 check_expr_coercable_to_type(fcx, &**element, uty);
3562 let t: Ty = fcx.infcx().next_ty_var();
3563 check_expr_has_type(fcx, &**element, t);
3564 (fcx.expr_ty(&**element), t)
3569 // For [foo, ..n] where n > 1, `foo` must have
3571 fcx.require_type_meets(
3578 if element_ty.references_error() {
3579 fcx.write_error(id);
3581 let t = tcx.mk_array(t, count);
3582 fcx.write_ty(id, t);
3585 hir::ExprTup(ref elts) => {
3586 let flds = expected.only_has_type(fcx).and_then(|ty| {
3588 ty::TyTuple(ref flds) => Some(&flds[..]),
3592 let mut err_field = false;
3594 let elt_ts = elts.iter().enumerate().map(|(i, e)| {
3595 let t = match flds {
3596 Some(ref fs) if i < fs.len() => {
3598 check_expr_coercable_to_type(fcx, &**e, ety);
3602 check_expr_with_expectation(fcx, &**e, NoExpectation);
3606 err_field = err_field || t.references_error();
3610 fcx.write_error(id);
3612 let typ = tcx.mk_tup(elt_ts);
3613 fcx.write_ty(id, typ);
3616 hir::ExprStruct(ref path, ref fields, ref base_expr) => {
3617 check_expr_struct(fcx, expr, path, fields, base_expr);
3619 fcx.require_expr_have_sized_type(expr, traits::StructInitializerSized);
3621 hir::ExprField(ref base, ref field) => {
3622 check_field(fcx, expr, lvalue_pref, &**base, field);
3624 hir::ExprTupField(ref base, idx) => {
3625 check_tup_field(fcx, expr, lvalue_pref, &**base, idx);
3627 hir::ExprIndex(ref base, ref idx) => {
3628 check_expr_with_lvalue_pref(fcx, &**base, lvalue_pref);
3629 check_expr(fcx, &**idx);
3631 let base_t = fcx.expr_ty(&**base);
3632 let idx_t = fcx.expr_ty(&**idx);
3634 if base_t.references_error() {
3635 fcx.write_ty(id, base_t);
3636 } else if idx_t.references_error() {
3637 fcx.write_ty(id, idx_t);
3639 let base_t = structurally_resolved_type(fcx, expr.span, base_t);
3640 match lookup_indexing(fcx, expr, base, base_t, idx_t, lvalue_pref) {
3641 Some((index_ty, element_ty)) => {
3642 let idx_expr_ty = fcx.expr_ty(idx);
3643 demand::eqtype(fcx, expr.span, index_ty, idx_expr_ty);
3644 fcx.write_ty(id, element_ty);
3647 check_expr_has_type(fcx, &**idx, fcx.tcx().types.err);
3648 fcx.type_error_message(
3651 format!("cannot index a value of type `{}`",
3656 fcx.write_ty(id, fcx.tcx().types.err);
3661 hir::ExprRange(ref start, ref end) => {
3662 let t_start = start.as_ref().map(|e| {
3663 check_expr(fcx, &**e);
3666 let t_end = end.as_ref().map(|e| {
3667 check_expr(fcx, &**e);
3671 let idx_type = match (t_start, t_end) {
3672 (Some(ty), None) | (None, Some(ty)) => {
3675 (Some(t_start), Some(t_end)) if (t_start.references_error() ||
3676 t_end.references_error()) => {
3677 Some(fcx.tcx().types.err)
3679 (Some(t_start), Some(t_end)) => {
3680 Some(infer::common_supertype(fcx.infcx(),
3681 TypeOrigin::RangeExpression(expr.span),
3689 // Note that we don't check the type of start/end satisfy any
3690 // bounds because right now the range structs do not have any. If we add
3691 // some bounds, then we'll need to check `t_start` against them here.
3693 let range_type = match idx_type {
3694 Some(idx_type) if idx_type.references_error() => {
3698 // Find the did from the appropriate lang item.
3699 let did = match (start, end) {
3700 (&Some(_), &Some(_)) => tcx.lang_items.range_struct(),
3701 (&Some(_), &None) => tcx.lang_items.range_from_struct(),
3702 (&None, &Some(_)) => tcx.lang_items.range_to_struct(),
3704 tcx.sess.span_bug(expr.span, "full range should be dealt with above")
3708 if let Some(did) = did {
3709 let def = tcx.lookup_adt_def(did);
3710 let predicates = tcx.lookup_predicates(did);
3711 let substs = Substs::new_type(vec![idx_type], vec![]);
3712 let bounds = fcx.instantiate_bounds(expr.span, &substs, &predicates);
3713 fcx.add_obligations_for_parameters(
3714 traits::ObligationCause::new(expr.span,
3716 traits::ItemObligation(did)),
3719 tcx.mk_struct(def, tcx.mk_substs(substs))
3721 span_err!(tcx.sess, expr.span, E0236, "no lang item for range syntax");
3726 // Neither start nor end => RangeFull
3727 if let Some(did) = tcx.lang_items.range_full_struct() {
3729 tcx.lookup_adt_def(did),
3730 tcx.mk_substs(Substs::empty())
3733 span_err!(tcx.sess, expr.span, E0237, "no lang item for range syntax");
3739 fcx.write_ty(id, range_type);
3744 debug!("type of expr({}) {} is...", expr.id,
3745 pprust::expr_to_string(expr));
3746 debug!("... {:?}, expected is {:?}",
// Completes resolution of a partially-resolved path (one where `path_res.depth > 0`,
// i.e. resolve could only name a type prefix and left the trailing item segment —
// typically an associated item reached via UFCS — for type check to resolve).
// Returns `(opt_self_ty, segments-to-instantiate, def)` on success, or None on error
// (in which case an error has already been reported and the node is marked as error).
// NOTE(review): several interior lines are elided in this listing (closing braces,
// some match arms); comments describe only what is visible.
3753 pub fn resolve_ty_and_def_ufcs<'a, 'b, 'tcx>(fcx: &FnCtxt<'b, 'tcx>,
3754 path_res: def::PathResolution,
3755 opt_self_ty: Option<Ty<'tcx>>,
3756 path: &'a hir::Path,
3758 node_id: ast::NodeId)
3759 -> Option<(Option<Ty<'tcx>>,
3760 &'a [hir::PathSegment],
3764 // If fully resolved already, we don't have to do anything.
3765 if path_res.depth == 0 {
3766 Some((opt_self_ty, &path.segments, path_res.base_def))
3768 let mut def = path_res.base_def;
// Split off the final (item) segment; the remainder names the base type.
3769 let ty_segments = path.segments.split_last().unwrap().1;
// `depth` counts the unresolved trailing segments, so everything before
// `base_ty_end` was resolved by name resolution and the rest is type-relative.
3770 let base_ty_end = path.segments.len() - path_res.depth;
3771 let ty = astconv::finish_resolving_def_to_ty(fcx, fcx, span,
3772 PathParamMode::Optional,
3775 &ty_segments[..base_ty_end],
3776 &ty_segments[base_ty_end..]);
3777 let item_segment = path.segments.last().unwrap();
3778 let item_name = item_segment.identifier.name;
// Look the item up as a UFCS associated item on the resolved base type.
3779 match method::resolve_ufcs(fcx, span, item_name, ty, node_id) {
3781 // Write back the new resolution.
3782 fcx.ccx.tcx.def_map.borrow_mut()
3783 .insert(node_id, def::PathResolution {
3785 last_private: path_res.last_private.or(lp),
3788 Some((Some(ty), slice::ref_slice(item_segment), def))
// Resolution failed: report the method-lookup error and record an
// error type for this node so downstream checks don't cascade.
3791 method::report_error(fcx, span, ty,
3792 item_name, None, error);
3793 fcx.write_error(node_id);
// Helper methods on `Expectation`, the type-hint lattice used while checking
// expressions (NoExpectation / ExpectHasType / ExpectCastableToType /
// ExpectRvalueLikeUnsized). NOTE(review): some interior lines (match headers,
// closing braces) are elided in this listing.
3800 impl<'tcx> Expectation<'tcx> {
3801 /// Provide an expectation for an rvalue expression given an *optional*
3802 /// hint, which is not required for type safety (the resulting type might
3803 /// be checked higher up, as is the case with `&expr` and `box expr`), but
3804 /// is useful in determining the concrete type.
3806 /// The primary use case is where the expected type is a fat pointer,
3807 /// like `&[isize]`. For example, consider the following statement:
3809 /// let x: &[isize] = &[1, 2, 3];
3811 /// In this case, the expected type for the `&[1, 2, 3]` expression is
3812 /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
3813 /// expectation `ExpectHasType([isize])`, that would be too strong --
3814 /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
3815 /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
3816 /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
3817 /// which still is useful, because it informs integer literals and the like.
3818 /// See the test case `test/run-pass/coerce-expect-unsized.rs` and #20169
3819 /// for examples of where this comes up,.
3820 fn rvalue_hint(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> {
// Weaken the hint when the type's tail is unsized (slice or trait object);
// otherwise the expectation is exact.
3821 match tcx.struct_tail(ty).sty {
3822 ty::TySlice(_) | ty::TyTrait(..) => {
3823 ExpectRvalueLikeUnsized(ty)
3825 _ => ExpectHasType(ty)
3829 // Resolves `expected` by a single level if it is a variable. If
3830 // there is no expected type or resolution is not possible (e.g.,
3831 // no constraints yet present), just returns `None`.
3832 fn resolve<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> {
// Each variant is rebuilt with its inner type shallowly resolved
// through the inference context.
3837 ExpectCastableToType(t) => {
3838 ExpectCastableToType(
3839 fcx.infcx().resolve_type_vars_if_possible(&t))
3841 ExpectHasType(t) => {
3843 fcx.infcx().resolve_type_vars_if_possible(&t))
3845 ExpectRvalueLikeUnsized(t) => {
3846 ExpectRvalueLikeUnsized(
3847 fcx.infcx().resolve_type_vars_if_possible(&t))
// Returns the hinted type regardless of which (non-empty) expectation kind
// carries it; `None` when there is no expectation at all.
3852 fn to_option<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> {
3853 match self.resolve(fcx) {
3854 NoExpectation => None,
3855 ExpectCastableToType(ty) |
3857 ExpectRvalueLikeUnsized(ty) => Some(ty),
// Like `to_option`, but only yields the type for the strict
// `ExpectHasType` case; weaker hints are treated as no expectation.
3861 fn only_has_type<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> {
3862 match self.resolve(fcx) {
3863 ExpectHasType(ty) => Some(ty),
// Type-checks the initializer expression of a `let` declaration against the
// declared/inferred type of the local. The `ref`-binding case uses strict
// type equality instead of coercion (see inline comment for the soundness
// rationale).
3869 pub fn check_decl_initializer<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
3870 local: &'tcx hir::Local,
3871 init: &'tcx hir::Expr)
// `Some(mutbl)` iff the pattern contains a `ref`/`ref mut` binding.
3873 let ref_bindings = fcx.tcx().pat_contains_ref_binding(&local.pat);
3875 let local_ty = fcx.local_ty(init.span, local.id);
3876 if let Some(m) = ref_bindings {
3877 // Somewhat subtle: if we have a `ref` binding in the pattern,
3878 // we want to avoid introducing coercions for the RHS. This is
3879 // both because it helps preserve sanity and, in the case of
3880 // ref mut, for soundness (issue #23116). In particular, in
3881 // the latter case, we need to be clear that the type of the
3882 // referent for the reference that results is *equal to* the
3883 // type of the lvalue it is referencing, and not some
3884 // supertype thereof.
3885 check_expr_with_lvalue_pref(fcx, init, LvaluePreference::from_mutbl(m));
3886 let init_ty = fcx.expr_ty(init);
3887 demand::eqtype(fcx, init.span, init_ty, local_ty);
// No `ref` bindings: the ordinary path allows coercion of the
// initializer to the local's type.
3889 check_expr_coercable_to_type(fcx, init, local_ty)
// Type-checks a whole `let` declaration: records the local's type, checks the
// initializer (if any), and checks the binding pattern against that type.
// If either the initializer or the pattern produced an error type, the error
// is propagated to the local's node so later passes see it.
3893 pub fn check_decl_local<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, local: &'tcx hir::Local) {
3894 let tcx = fcx.ccx.tcx;
3896 let t = fcx.local_ty(local.span, local.id);
3897 fcx.write_ty(local.id, t);
3899 if let Some(ref init) = local.init {
3900 check_decl_initializer(fcx, local, &**init);
3901 let init_ty = fcx.expr_ty(&**init);
3902 if init_ty.references_error() {
// Taint the local with the initializer's error type.
3903 fcx.write_ty(local.id, init_ty);
// Check the pattern itself (irrefutability, bindings) against the local type.
// NOTE(review): some fields of `pat_ctxt` are elided in this listing.
3907 let pcx = pat_ctxt {
3909 map: pat_id_map(&tcx.def_map, &*local.pat),
3911 _match::check_pat(&pcx, &*local.pat, t);
3912 let pat_ty = fcx.node_ty(local.pat.id);
3913 if pat_ty.references_error() {
3914 fcx.write_ty(local.id, pat_ty);
// Type-checks a single statement and records its type: diverging if any
// sub-part diverges, error if any sub-part had a type error, `()` otherwise.
// `saw_bot` tracks divergence, `saw_err` tracks error propagation.
// NOTE(review): the outer `match stmt.node` header and the node-id binding
// are elided in this listing.
3918 pub fn check_stmt<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, stmt: &'tcx hir::Stmt) {
3920 let mut saw_bot = false;
3921 let mut saw_err = false;
3923 hir::StmtDecl(ref decl, id) => {
3926 hir::DeclLocal(ref l) => {
3927 check_decl_local(fcx, &**l);
3928 let l_t = fcx.node_ty(l.id);
3929 saw_bot = saw_bot || fcx.infcx().type_var_diverges(l_t);
3930 saw_err = saw_err || l_t.references_error();
3932 hir::DeclItem(_) => {/* ignore for now */ }
3935 hir::StmtExpr(ref expr, id) => {
3937 // Check with expected type of ()
3938 check_expr_has_type(fcx, &**expr, fcx.tcx().mk_nil());
3939 let expr_ty = fcx.expr_ty(&**expr);
3940 saw_bot = saw_bot || fcx.infcx().type_var_diverges(expr_ty);
3941 saw_err = saw_err || expr_ty.references_error();
// Semicolon statements discard their value, so no `()` requirement here.
3943 hir::StmtSemi(ref expr, id) => {
3945 check_expr(fcx, &**expr);
3946 let expr_ty = fcx.expr_ty(&**expr);
3947 saw_bot |= fcx.infcx().type_var_diverges(expr_ty);
3948 saw_err |= expr_ty.references_error();
// Record the statement's overall type: diverging beats error beats nil.
3952 fcx.write_ty(node_id, fcx.infcx().next_diverging_ty_var());
3955 fcx.write_error(node_id);
3958 fcx.write_nil(node_id)
// Type-checks a block that must not produce a value (e.g. a loop body):
// checks it with an expectation of `()` and then demands that `()` is a
// supertype of the block's actual type, unless the block already errored.
3962 pub fn check_block_no_value<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, blk: &'tcx hir::Block) {
3963 check_block_with_expected(fcx, blk, ExpectHasType(fcx.tcx().mk_nil()));
3964 let blkty = fcx.node_ty(blk.id);
3965 if blkty.references_error() {
3966 fcx.write_error(blk.id);
// No error: enforce that the block's type is compatible with `()`.
3968 let nilty = fcx.tcx().mk_nil();
3969 demand::suptype(fcx, blk.span, nilty, blkty);
// Type-checks a block against an expectation: each statement in order, then
// the optional tail expression. Tracks divergence to emit UNREACHABLE_CODE
// lints (at most one warning per block via `warned`) and propagates
// error/diverging types to the block node. Also saves/restores the unsafety
// state around the block. NOTE(review): several interior lines (the tail-expr
// match header, lint call arguments, closing braces) are elided in this
// listing; comments describe only the visible structure.
3973 fn check_block_with_expected<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
3974 blk: &'tcx hir::Block,
3975 expected: Expectation<'tcx>) {
// Swap in the unsafety state for this block; the previous state is
// restored at the end of the function.
3977 let mut fcx_ps = fcx.ps.borrow_mut();
3978 let unsafety_state = fcx_ps.recurse(blk);
3979 replace(&mut *fcx_ps, unsafety_state)
3982 let mut warned = false;
3983 let mut any_diverges = false;
3984 let mut any_err = false;
3985 for s in &blk.stmts {
3987 let s_id = ::rustc_front::util::stmt_id(s);
3988 let s_ty = fcx.node_ty(s_id);
// Warn once about statements that follow a diverging statement; item
// declarations are exempt (they are not "executed").
3989 if any_diverges && !warned && match s.node {
3990 hir::StmtDecl(ref decl, _) => {
3992 hir::DeclLocal(_) => true,
3996 hir::StmtExpr(_, _) | hir::StmtSemi(_, _) => true,
4001 .add_lint(lint::builtin::UNREACHABLE_CODE,
4004 "unreachable statement".to_string());
4007 any_diverges = any_diverges || fcx.infcx().type_var_diverges(s_ty);
4008 any_err = any_err || s_ty.references_error();
// No tail expression: block type is error / diverging / `()`.
4011 None => if any_err {
4012 fcx.write_error(blk.id);
4013 } else if any_diverges {
4014 fcx.write_ty(blk.id, fcx.infcx().next_diverging_ty_var());
4016 fcx.write_nil(blk.id);
// Tail expression present: warn if it is unreachable, then check it
// against the block's expectation (coercing only for a strict hint).
4019 if any_diverges && !warned {
4023 .add_lint(lint::builtin::UNREACHABLE_CODE,
4026 "unreachable expression".to_string());
4028 let ety = match expected {
4029 ExpectHasType(ety) => {
4030 check_expr_coercable_to_type(fcx, &**e, ety);
4034 check_expr_with_expectation(fcx, &**e, expected);
4040 fcx.write_error(blk.id);
4041 } else if any_diverges {
4042 fcx.write_ty(blk.id, fcx.infcx().next_diverging_ty_var());
4044 fcx.write_ty(blk.id, ety);
// Restore the unsafety state saved on entry.
4049 *fcx.ps.borrow_mut() = prev;
4052 /// Checks a constant appearing in a type. At the moment this is just the
4053 /// length expression in a fixed-length vector, but someday it might be
4054 /// extended to type-level numeric literals.
4055 fn check_const_in_type<'a,'tcx>(ccx: &'a CrateCtxt<'a,'tcx>,
4056 expr: &'tcx hir::Expr,
4057 expected_type: Ty<'tcx>) {
// Constants in type position are checked in a fresh, static-like context:
// empty side tables and a blank function context converging to the
// expected type.
4058 let tables = RefCell::new(ty::Tables::empty());
4059 let inh = static_inherited_fields(ccx, &tables);
4060 let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(expected_type), expr.id);
4061 check_const_with_ty(&fcx, expr.span, expr, expected_type);
// Type-checks a constant item (`const`/`static`) body: looks up the item's
// declared type and checks the expression against it in a fresh static
// context. NOTE(review): two parameter lines are elided in this listing;
// from the body, the parameters include the expression `e`, its span `sp`,
// and the item's node id `id`.
4064 fn check_const<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
4068 let tables = RefCell::new(ty::Tables::empty());
4069 let inh = static_inherited_fields(ccx, &tables);
4070 let rty = ccx.tcx.node_id_to_type(id);
4071 let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(rty), e.id);
// The declared type comes from the item's recorded type scheme.
4072 let declty = fcx.ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(id)).ty;
4073 check_const_with_ty(&fcx, sp, e, declty);
// Shared driver for checking a constant expression against a known type:
// gathers locals, checks and coerces the expression, then runs the
// obligation-selection, region-check, and writeback phases that a full
// function body would get. NOTE(review): the parameter lines after the
// context are elided in this listing (`sp`, `e`, `declty` per the body).
4076 fn check_const_with_ty<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4080 // Gather locals in statics (because of block expressions).
4081 // This is technically unnecessary because locals in static items are forbidden,
4082 // but prevents type checking from blowing up before const checking can properly
4084 GatherLocalsVisitor { fcx: fcx }.visit_expr(e);
4086 check_expr_with_hint(fcx, e, declty);
4087 demand::coerce(fcx, e.span, declty, e);
// Resolve all pending trait obligations, then run the later phases
// (closure analysis, regionck, writeback) on just this expression.
4089 fcx.select_all_obligations_and_apply_defaults();
4090 upvar::closure_analyze_const(&fcx, e);
4091 fcx.select_obligations_where_possible();
4093 fcx.select_all_obligations_or_error();
4095 regionck::regionck_expr(fcx, e);
4096 writeback::resolve_type_vars_in_expr(fcx, e);
4099 /// Checks whether a type can be represented in memory. In particular, it
4100 /// identifies types that contain themselves without indirection through a
4101 /// pointer, which would mean their size is unbounded.
// Returns a bool; from the visible code, the error path emits a diagnostic
// (the explicit return values are on lines elided from this listing).
4102 pub fn check_representable(tcx: &ty::ctxt,
4104 item_id: ast::NodeId,
4105 _designation: &str) -> bool {
4106 let rty = tcx.node_id_to_type(item_id);
4108 // Check that it is possible to represent this type. This call identifies
4109 // (1) types that contain themselves and (2) types that contain a different
4110 // recursive type. It is only necessary to throw an error on those that
4111 // contain themselves. For case 2, there must be an inner type that will be
4112 // caught by case 1.
4113 match rty.is_representable(tcx, sp) {
4114 Representability::SelfRecursive => {
4115 let item_def_id = tcx.map.local_def_id(item_id);
4116 traits::recursive_type_with_infinite_size_error(tcx, item_def_id).emit();
4119 Representability::Representable | Representability::ContainsRecursive => (),
// Validates a `#[repr(simd)]` struct: it must be a non-empty struct whose
// fields all have the same type, and that element type must be a machine
// type (or a type parameter, for generic SIMD structs). Emits E0075/E0076/
// E0077 on violations. NOTE(review): the outer `match t.sty` header and some
// arm boundaries are elided in this listing.
4124 pub fn check_simd(tcx: &ty::ctxt, sp: Span, id: ast::NodeId) {
4125 let t = tcx.node_id_to_type(id);
4127 ty::TyStruct(def, substs) => {
4128 let fields = &def.struct_variant().fields;
4129 if fields.is_empty() {
4130 span_err!(tcx.sess, sp, E0075, "SIMD vector cannot be empty");
// Homogeneity: every field must equal the first field's type.
4133 let e = fields[0].ty(tcx, substs);
4134 if !fields.iter().all(|f| f.ty(tcx, substs) == e) {
4135 span_err!(tcx.sess, sp, E0076, "SIMD vector should be homogeneous");
// Element-type restrictions (these arms match on the element type `e`).
4139 ty::TyParam(_) => { /* struct<T>(T, T, T, T) is ok */ }
4140 _ if e.is_machine() => { /* struct(u8, u8, u8, u8) is ok */ }
4142 span_err!(tcx.sess, sp, E0077,
4143 "SIMD vector element type should be machine type");
// Checks an enum definition: validates explicit discriminant expressions,
// detects duplicate discriminant values (E0081) and values out of range for
// a specified `#[repr(..)]` int type (E0082), rejects unsupported reprs on
// univariant/zero-variant enums (E0083/E0084), and finally checks
// representability. NOTE(review): many interior lines (match headers,
// closing braces, some parameters) are elided in this listing.
4152 pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
4154 vs: &'tcx [hir::Variant],
// Helper: does `disr` fit in the repr's integer type? Implemented by
// round-tripping the value through the candidate width and comparing.
4157 fn disr_in_range(ccx: &CrateCtxt,
4159 disr: ty::Disr) -> bool {
4160 fn uint_in_range(ccx: &CrateCtxt, ty: ast::UintTy, disr: ty::Disr) -> bool {
4162 ast::TyU8 => disr as u8 as Disr == disr,
4163 ast::TyU16 => disr as u16 as Disr == disr,
4164 ast::TyU32 => disr as u32 as Disr == disr,
4165 ast::TyU64 => disr as u64 as Disr == disr,
// `usize` defers to the target's pointer-width unsigned type.
4166 ast::TyUs => uint_in_range(ccx, ccx.tcx.sess.target.uint_type, disr)
4169 fn int_in_range(ccx: &CrateCtxt, ty: ast::IntTy, disr: ty::Disr) -> bool {
4171 ast::TyI8 => disr as i8 as Disr == disr,
4172 ast::TyI16 => disr as i16 as Disr == disr,
4173 ast::TyI32 => disr as i32 as Disr == disr,
4174 ast::TyI64 => disr as i64 as Disr == disr,
4175 ast::TyIs => int_in_range(ccx, ccx.tcx.sess.target.int_type, disr)
4179 attr::UnsignedInt(ty) => uint_in_range(ccx, ty, disr),
4180 attr::SignedInt(ty) => int_in_range(ccx, ty, disr)
// Main per-variant validation loop, run with the enum's repr hint.
4184 fn do_check<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
4185 vs: &'tcx [hir::Variant],
4187 hint: attr::ReprAttr) {
4188 #![allow(trivial_numeric_casts)]
4190 let rty = ccx.tcx.node_id_to_type(id);
4191 let mut disr_vals: Vec<ty::Disr> = Vec::new();
4193 let tables = RefCell::new(ty::Tables::empty());
4194 let inh = static_inherited_fields(ccx, &tables);
4195 let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(rty), id);
4197 let (_, repr_type_ty) = ccx.tcx.enum_repr_type(Some(&hint));
// Explicit discriminant expressions are checked as constants of the
// repr's integer type.
4199 if let Some(ref e) = v.node.disr_expr {
4200 check_const_with_ty(&fcx, e.span, e, repr_type_ty);
4204 let def_id = ccx.tcx.map.local_def_id(id);
4206 let variants = &ccx.tcx.lookup_adt_def(def_id).variants;
4207 for (v, variant) in vs.iter().zip(variants.iter()) {
4208 let current_disr_val = variant.disr_val;
4210 // Check for duplicate discriminant values
4211 match disr_vals.iter().position(|&x| x == current_disr_val) {
4213 let mut err = struct_span_err!(ccx.tcx.sess, v.span, E0081,
4214 "discriminant value `{}` already exists", disr_vals[i]);
4215 let variant_i_node_id = ccx.tcx.map.as_local_node_id(variants[i].did).unwrap();
4216 span_note!(&mut err, ccx.tcx.map.span(variant_i_node_id),
4217 "conflicting discriminant here");
4222 // Check for unrepresentable discriminant values
4224 attr::ReprAny | attr::ReprExtern => (),
4225 attr::ReprInt(sp, ity) => {
4226 if !disr_in_range(ccx, ity, current_disr_val) {
4227 let mut err = struct_span_err!(ccx.tcx.sess, v.span, E0082,
4228 "discriminant value outside specified type");
4229 span_note!(&mut err, sp,
4230 "discriminant type specified here");
// Simd/packed reprs should have been rejected before reaching an enum.
4235 ccx.tcx.sess.bug("range_to_inttype: found ReprSimd on an enum");
4237 attr::ReprPacked => {
4238 ccx.tcx.sess.bug("range_to_inttype: found ReprPacked on an enum");
4241 disr_vals.push(current_disr_val);
// Body of check_enum_variants proper: fetch the repr hint, reject
// unsupported repr/variant-count combinations, then run do_check.
4245 let def_id = ccx.tcx.map.local_def_id(id);
4246 let hint = *ccx.tcx.lookup_repr_hints(def_id).get(0).unwrap_or(&attr::ReprAny);
4248 if hint != attr::ReprAny && vs.len() <= 1 {
4250 span_err!(ccx.tcx.sess, sp, E0083,
4251 "unsupported representation for univariant enum");
4253 span_err!(ccx.tcx.sess, sp, E0084,
4254 "unsupported representation for zero-variant enum");
4258 do_check(ccx, vs, id, hint);
4260 check_representable(ccx.tcx, sp, id, "enum");
4263 // Returns the type parameter count and the type for the given definition.
// Only value-namespace definitions are valid here: locals/upvars get their
// local type with empty generics; item-like values get their recorded type
// scheme and predicates; anything else is a compiler bug (span_bug).
// NOTE(review): the `match defn` header and some arm lines are elided in
// this listing.
4264 fn type_scheme_and_predicates_for_def<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4267 -> (TypeScheme<'tcx>, GenericPredicates<'tcx>) {
4269 def::DefLocal(_, nid) | def::DefUpvar(_, nid, _, _) => {
4270 let typ = fcx.local_ty(sp, nid);
4271 (ty::TypeScheme { generics: ty::Generics::empty(), ty: typ },
4272 ty::GenericPredicates::empty())
4274 def::DefFn(id, _) | def::DefMethod(id) |
4275 def::DefStatic(id, _) | def::DefVariant(_, id, _) |
4276 def::DefStruct(id) | def::DefConst(id) | def::DefAssociatedConst(id) => {
4277 (fcx.tcx().lookup_item_type(id), fcx.tcx().lookup_predicates(id))
// Type-namespace and module-like defs cannot appear in value position.
4281 def::DefAssociatedTy(..) |
4283 def::DefTyParam(..) |
4285 def::DefForeignMod(..) |
4287 def::DefSelfTy(..) |
4289 fcx.ccx.tcx.sess.span_bug(sp, &format!("expected value, found {:?}", defn));
4294 // Instantiates the given path, which must refer to an item with the given
4295 // number of type parameters and type.
// High-level flow: (1) classify each path segment into a parameter space
// (TypeSpace/FnSpace/None) based on what kind of item the path resolves to;
// (2) collect explicitly-provided type/region parameters into `substs`,
// erroring on parameters in illegal positions; (3) fill in defaults or fresh
// inference variables per space; (4) register the item's bounds as trait
// obligations and write the substituted type back for `node_id`.
// NOTE(review): a number of interior lines (the `match def` header, some
// arm patterns, closing braces, trailing arguments) are elided in this
// listing; comments describe only what is visible.
4296 pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4297 segments: &[hir::PathSegment],
4298 type_scheme: TypeScheme<'tcx>,
4299 type_predicates: &ty::GenericPredicates<'tcx>,
4300 opt_self_ty: Option<Ty<'tcx>>,
4303 node_id: ast::NodeId) {
4304 debug!("instantiate_path(path={:?}, def={:?}, node_id={}, type_scheme={:?})",
4310 // We need to extract the type parameters supplied by the user in
4311 // the path `path`. Due to the current setup, this is a bit of a
4312 // tricky-process; the problem is that resolve only tells us the
4313 // end-point of the path resolution, and not the intermediate steps.
4314 // Luckily, we can (at least for now) deduce the intermediate steps
4315 // just from the end-point.
4317 // There are basically four cases to consider:
4319 // 1. Reference to a *type*, such as a struct or enum:
4321 // mod a { struct Foo<T> { ... } }
4323 // Because we don't allow types to be declared within one
4324 // another, a path that leads to a type will always look like
4325 // `a::b::Foo<T>` where `a` and `b` are modules. This implies
4326 // that only the final segment can have type parameters, and
4327 // they are located in the TypeSpace.
4329 // *Note:* Generally speaking, references to types don't
4330 // actually pass through this function, but rather the
4331 // `ast_ty_to_ty` function in `astconv`. However, in the case
4332 // of struct patterns (and maybe literals) we do invoke
4333 // `instantiate_path` to get the general type of an instance of
4334 // a struct. (In these cases, there are actually no type
4335 // parameters permitted at present, but perhaps we will allow
4336 // them in the future.)
4338 // 1b. Reference to an enum variant or tuple-like struct:
4340 // struct foo<T>(...)
4341 // enum E<T> { foo(...) }
4343 // In these cases, the parameters are declared in the type
4346 // 2. Reference to a *fn item*:
4350 // In this case, the path will again always have the form
4351 // `a::b::foo::<T>` where only the final segment should have
4352 // type parameters. However, in this case, those parameters are
4353 // declared on a value, and hence are in the `FnSpace`.
4355 // 3. Reference to a *method*:
4357 // impl<A> SomeStruct<A> {
4361 // Here we can have a path like
4362 // `a::b::SomeStruct::<A>::foo::<B>`, in which case parameters
4363 // may appear in two places. The penultimate segment,
4364 // `SomeStruct::<A>`, contains parameters in TypeSpace, and the
4365 // final segment, `foo::<B>` contains parameters in fn space.
4367 // 4. Reference to an *associated const*:
4369 // impl<A> AnotherStruct<A> {
4370 // const FOO: B = BAR;
4373 // The path in this case will look like
4374 // `a::b::AnotherStruct::<A>::FOO`, so the penultimate segment
4375 // only will have parameters in TypeSpace.
4377 // The first step then is to categorize the segments appropriately.
4379 assert!(!segments.is_empty());
4381 let mut ufcs_associated = None;
// One entry per segment: which parameter space (if any) its explicit
// parameters belong to. `None` means "no parameters allowed here".
4382 let mut segment_spaces: Vec<_>;
4384 // Case 1 and 1b. Reference to a *type* or *enum variant*.
4385 def::DefSelfTy(..) |
4386 def::DefStruct(..) |
4387 def::DefVariant(..) |
4389 def::DefAssociatedTy(..) |
4391 def::DefPrimTy(..) |
4392 def::DefTyParam(..) => {
4393 // Everything but the final segment should have no
4394 // parameters at all.
4395 segment_spaces = vec![None; segments.len() - 1];
4396 segment_spaces.push(Some(subst::TypeSpace));
4399 // Case 2. Reference to a top-level value.
4402 def::DefStatic(..) => {
4403 segment_spaces = vec![None; segments.len() - 1];
4404 segment_spaces.push(Some(subst::FnSpace));
4407 // Case 3. Reference to a method.
4408 def::DefMethod(def_id) => {
4409 let container = fcx.tcx().impl_or_trait_item(def_id).container();
4411 ty::TraitContainer(trait_did) => {
4412 callee::check_legal_trait_for_method_call(fcx.ccx, span, trait_did)
4414 ty::ImplContainer(_) => {}
4417 if segments.len() >= 2 {
4418 segment_spaces = vec![None; segments.len() - 2];
4419 segment_spaces.push(Some(subst::TypeSpace));
4420 segment_spaces.push(Some(subst::FnSpace));
4422 // `<T>::method` will end up here, and so can `T::method`.
4423 let self_ty = opt_self_ty.expect("UFCS sugared method missing Self");
4424 segment_spaces = vec![Some(subst::FnSpace)];
4425 ufcs_associated = Some((container, self_ty));
// Case 4. Reference to an associated const (parallel to case 3, but
// the final segment takes no parameters).
4429 def::DefAssociatedConst(def_id) => {
4430 let container = fcx.tcx().impl_or_trait_item(def_id).container();
4432 ty::TraitContainer(trait_did) => {
4433 callee::check_legal_trait_for_method_call(fcx.ccx, span, trait_did)
4435 ty::ImplContainer(_) => {}
4438 if segments.len() >= 2 {
4439 segment_spaces = vec![None; segments.len() - 2];
4440 segment_spaces.push(Some(subst::TypeSpace));
4441 segment_spaces.push(None);
4443 // `<T>::CONST` will end up here, and so can `T::CONST`.
4444 let self_ty = opt_self_ty.expect("UFCS sugared const missing Self");
4445 segment_spaces = vec![None];
4446 ufcs_associated = Some((container, self_ty));
4450 // Other cases. Various nonsense that really shouldn't show up
4451 // here. If they do, an error will have been reported
4452 // elsewhere. (I hope)
4454 def::DefForeignMod(..) |
4459 segment_spaces = vec![None; segments.len()];
4462 assert_eq!(segment_spaces.len(), segments.len());
4464 // In `<T as Trait<A, B>>::method`, `A` and `B` are mandatory, but
4465 // `opt_self_ty` can also be Some for `Foo::method`, where Foo's
4466 // type parameters are not mandatory.
4467 let require_type_space = opt_self_ty.is_some() && ufcs_associated.is_none();
4469 debug!("segment_spaces={:?}", segment_spaces);
4471 // Next, examine the definition, and determine how many type
4472 // parameters we expect from each space.
4473 let type_defs = &type_scheme.generics.types;
4474 let region_defs = &type_scheme.generics.regions;
4476 // Now that we have categorized what space the parameters for each
4477 // segment belong to, let's sort out the parameters that the user
4478 // provided (if any) into their appropriate spaces. We'll also report
4479 // errors if type parameters are provided in an inappropriate place.
4480 let mut substs = Substs::empty();
4481 for (opt_space, segment) in segment_spaces.iter().zip(segments) {
// Segments with no space allowed reject explicit parameters outright.
4484 prohibit_type_params(fcx.tcx(), slice::ref_slice(segment));
4488 push_explicit_parameters_from_segment_to_substs(fcx,
// If the item takes a `Self` type parameter, fill it from the supplied
// self type.
4498 if let Some(self_ty) = opt_self_ty {
4499 if type_defs.len(subst::SelfSpace) == 1 {
4500 substs.types.push(subst::SelfSpace, self_ty);
4504 // Now we have to compare the types that the user *actually*
4505 // provided against the types that were *expected*. If the user
4506 // did not provide any types, then we want to substitute inference
4507 // variables. If the user provided some types, we may still need
4508 // to add defaults. If the user provided *too many* types, that's
4510 for &space in &[subst::SelfSpace, subst::TypeSpace, subst::FnSpace] {
4511 adjust_type_parameters(fcx, span, space, type_defs,
4512 require_type_space, &mut substs);
4513 assert_eq!(substs.types.len(space), type_defs.len(space));
4515 adjust_region_parameters(fcx, span, space, region_defs, &mut substs);
4516 assert_eq!(substs.regions().len(space), region_defs.len(space));
4519 // The things we are substituting into the type should not contain
4520 // escaping late-bound regions, and nor should the base type scheme.
4521 assert!(!substs.has_regions_escaping_depth(0));
4522 assert!(!type_scheme.has_escaping_regions());
4524 // Add all the obligations that are required, substituting and
4525 // normalized appropriately.
4526 let bounds = fcx.instantiate_bounds(span, &substs, &type_predicates);
4527 fcx.add_obligations_for_parameters(
4528 traits::ObligationCause::new(span, fcx.body_id, traits::ItemObligation(def.def_id())),
4531 // Substitute the values for the type parameters into the type of
4532 // the referenced item.
4533 let ty_substituted = fcx.instantiate_type_scheme(span, &substs, &type_scheme.ty);
4536 if let Some((ty::ImplContainer(impl_def_id), self_ty)) = ufcs_associated {
4537 // In the case of `Foo<T>::method` and `<Foo<T>>::method`, if `method`
4538 // is inherent, there is no `Self` parameter, instead, the impl needs
4539 // type parameters, which we can infer by unifying the provided `Self`
4540 // with the substituted impl type.
4541 let impl_scheme = fcx.tcx().lookup_item_type(impl_def_id);
4542 assert_eq!(substs.types.len(subst::TypeSpace),
4543 impl_scheme.generics.types.len(subst::TypeSpace));
4544 assert_eq!(substs.regions().len(subst::TypeSpace),
4545 impl_scheme.generics.regions.len(subst::TypeSpace));
4547 let impl_ty = fcx.instantiate_type_scheme(span, &substs, &impl_scheme.ty);
// Method probing already proved this subtyping holds; failure here
// indicates an internal inconsistency, hence span_bug.
4548 if fcx.mk_subty(false, TypeOrigin::Misc(span), self_ty, impl_ty).is_err() {
4549 fcx.tcx().sess.span_bug(span,
4551 "instantiate_path: (UFCS) {:?} was a subtype of {:?} but now is not?",
4557 debug!("instantiate_path: type of {:?} is {:?}",
// Record the final substituted type and the item substitutions for
// this node.
4560 fcx.write_ty(node_id, ty_substituted);
4561 fcx.write_substs(node_id, ty::ItemSubsts { substs: substs });
4564 /// Finds the parameters that the user provided and adds them to `substs`. If too many
4565 /// parameters are provided, then reports an error and clears the output vector.
4567 /// We clear the output vector because that will cause the `adjust_XXX_parameters()` later to
4568 /// use inference variables. This seems less likely to lead to derived errors.
4570 /// Note that we *do not* check for *too few* parameters here. Due to the presence of defaults
4571 /// etc that is more complicated. I wanted however to do the reporting of *too many* parameters
4572 /// here because we can easily use the precise span of the N+1'th parameter.
// Dispatches on the segment's parameter syntax: angle-bracketed params are
// handled directly; parenthesized params (`Fn(A) -> B` sugar) are an error
// outside trait position (E0238) but still pushed for recovery.
4573 fn push_explicit_parameters_from_segment_to_substs<'a, 'tcx>(
4574 fcx: &FnCtxt<'a, 'tcx>,
4575 space: subst::ParamSpace,
4577 type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4578 region_defs: &VecPerParamSpace<ty::RegionParameterDef>,
4579 segment: &hir::PathSegment,
4580 substs: &mut Substs<'tcx>)
4582 match segment.parameters {
4583 hir::AngleBracketedParameters(ref data) => {
4584 push_explicit_angle_bracketed_parameters_from_segment_to_substs(
4585 fcx, space, type_defs, region_defs, data, substs);
4588 hir::ParenthesizedParameters(ref data) => {
4589 span_err!(fcx.tcx().sess, span, E0238,
4590 "parenthesized parameters may only be used with a trait");
// Error recovery: still collect the parameters so later phases
// operate on something sensible.
4591 push_explicit_parenthesized_parameters_from_segment_to_substs(
4592 fcx, space, span, type_defs, data, substs);
// Copies the explicitly-written `<T, U, 'a, ...>` parameters of one path
// segment into `substs`, reporting E0087/E0088 on excess and truncating the
// relevant vector to 0 so that `adjust_XXX_parameters()` falls back to
// inference variables.
// NOTE(review): several original lines are elided from this listing (gaps in
// the line numbers), including the `if i < type_count {` branch head that must
// precede line 4611 and the numeric format arguments of the span_err! calls.
4597 fn push_explicit_angle_bracketed_parameters_from_segment_to_substs<'a, 'tcx>(
4598 fcx: &FnCtxt<'a, 'tcx>,
4599 space: subst::ParamSpace,
4600 type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4601 region_defs: &VecPerParamSpace<ty::RegionParameterDef>,
4602 data: &hir::AngleBracketedParameterData,
4603 substs: &mut Substs<'tcx>)
// How many type parameters the definition declares in this space; caller
// must not have filled this space yet.
4606 let type_count = type_defs.len(space);
4607 assert_eq!(substs.types.len(space), 0);
4608 for (i, typ) in data.types.iter().enumerate() {
4609 let t = fcx.to_ty(&**typ);
4611 substs.types.push(space, t);
// Report the error exactly once, at the first excess parameter.
4612 } else if i == type_count {
4613 span_err!(fcx.tcx().sess, typ.span, E0087,
4614 "too many type parameters provided: \
4615 expected at most {} parameter{}, \
4616 found {} parameter{}",
4618 if type_count == 1 {""} else {"s"},
4620 if data.types.len() == 1 {""} else {"s"});
// Clear what we pushed so later adjustment uses inference variables
// instead of a partially-filled space (see fn-level doc above).
4621 substs.types.truncate(space, 0);
// Associated-type bindings (`Item = T`) are not legal in expression paths.
4627 if !data.bindings.is_empty() {
4628 span_err!(fcx.tcx().sess, data.bindings[0].span, E0182,
4629 "unexpected binding of associated item in expression path \
4630 (only allowed in type paths)");
// Same scheme for lifetimes: push up to the declared count, E0088 + truncate
// on overflow.
4634 let region_count = region_defs.len(space);
4635 assert_eq!(substs.regions().len(space), 0);
4636 for (i, lifetime) in data.lifetimes.iter().enumerate() {
4637 let r = ast_region_to_region(fcx.tcx(), lifetime);
4638 if i < region_count {
4639 substs.mut_regions().push(space, r);
4640 } else if i == region_count {
4641 span_err!(fcx.tcx().sess, lifetime.span, E0088,
4642 "too many lifetime parameters provided: \
4643 expected {} parameter{}, found {} parameter{}",
4645 if region_count == 1 {""} else {"s"},
4646 data.lifetimes.len(),
4647 if data.lifetimes.len() == 1 {""} else {"s"});
4648 substs.mut_regions().truncate(space, 0);
// NOTE(review): the first doc line(s) of this comment are elided in this
// listing; it reads as "As [the angle-bracketed variant], but ...".
4656 /// `push_explicit_angle_bracketed_parameters_from_segment_to_substs`,
4657 /// but intended for `Foo(A,B) -> C` form. This expands to
4658 /// roughly the same thing as `Foo<(A,B),C>`. One important
4659 /// difference has to do with the treatment of anonymous
4660 /// regions, which are translated into bound regions (NYI).
4661 fn push_explicit_parenthesized_parameters_from_segment_to_substs<'a, 'tcx>(
4662 fcx: &FnCtxt<'a, 'tcx>,
4663 space: subst::ParamSpace,
4665 type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4666 data: &hir::ParenthesizedParameterData,
4667 substs: &mut Substs<'tcx>)
// The sugar always supplies exactly two types: the tupled inputs and the
// output. E0167 fires when the definition expects a different count.
// NOTE(review): the `if` condition guarding this error (original line 4670)
// is elided from this listing — presumably `type_count < 2`; confirm.
4669 let type_count = type_defs.len(space);
4671 span_err!(fcx.tcx().sess, span, E0167,
4672 "parenthesized form always supplies 2 type parameters, \
4673 but only {} parameter(s) were expected",
// First supplied type parameter: the inputs, tupled.
4677 let input_tys: Vec<Ty> =
4678 data.inputs.iter().map(|ty| fcx.to_ty(&**ty)).collect();
4680 let tuple_ty = fcx.tcx().mk_tup(input_tys);
// Guarded pushes so we never overfill the space when the arity was wrong.
4682 if type_count >= 1 {
4683 substs.types.push(space, tuple_ty);
// Second supplied type parameter: the output, defaulting to `()` when the
// `-> C` part was omitted.
4686 let output_ty: Option<Ty> =
4687 data.output.as_ref().map(|ty| fcx.to_ty(&**ty));
4690 output_ty.unwrap_or(fcx.tcx().mk_nil());
4692 if type_count >= 2 {
4693 substs.types.push(space, output_ty);
// Tops up `substs` for one param space after the explicit parameters were
// pushed: supplies inference variables when nothing was written, reports
// E0089 (too few) and fills with `err` types, or substitutes in declared
// defaults. Too-many is already handled (and the space truncated) by
// `push_explicit_parameters_from_segment_to_substs()`.
// NOTE(review): this listing elides lines — the `span`/`space` parameters
// (original 4699-4700), the terminal `.count()` of `required_len`, and the
// early `return`s after the first two branches are all missing; confirm
// against the full source.
4697 fn adjust_type_parameters<'a, 'tcx>(
4698 fcx: &FnCtxt<'a, 'tcx>,
4701 defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4702 require_type_space: bool,
4703 substs: &mut Substs<'tcx>)
4705 let provided_len = substs.types.len(space);
4706 let desired = defs.get_slice(space);
// Defaults are only legal at the tail, so the defs before the first default
// are exactly the required ones.
4707 let required_len = desired.iter()
4708 .take_while(|d| d.default.is_none())
4711 debug!("adjust_type_parameters(space={:?}, \
4720 // Enforced by `push_explicit_parameters_from_segment_to_substs()`.
4721 assert!(provided_len <= desired.len());
4723 // Nothing specified at all: supply inference variables for
4725 if provided_len == 0 && !(require_type_space && space == subst::TypeSpace) {
4726 substs.types.replace(space, Vec::new());
4727 fcx.infcx().type_vars_for_defs(span, space, substs, &desired[..]);
4731 // Too few parameters specified: report an error and use Err
4733 if provided_len < required_len {
// "at least" only when trailing defaults make the expected count a lower bound.
4735 if desired.len() != required_len { "at least " } else { "" };
4736 span_err!(fcx.tcx().sess, span, E0089,
4737 "too few type parameters provided: expected {}{} parameter{}, \
4738 found {} parameter{}",
4739 qualifier, required_len,
4740 if required_len == 1 {""} else {"s"},
4742 if provided_len == 1 {""} else {"s"});
// Fill with `err` so downstream checking proceeds without cascading errors.
4743 substs.types.replace(space, vec![fcx.tcx().types.err; desired.len()]);
4747 // Otherwise, add in any optional parameters that the user
4748 // omitted. The case of *too many* parameters is handled
4750 // push_explicit_parameters_from_segment_to_substs(). Note
4751 // that the *default* type are expressed in terms of all prior
4752 // parameters, so we have to substitute as we go with the
4753 // partial substitution that we have built up.
4754 for i in provided_len..desired.len() {
// unwrap() is safe: everything past `required_len` has a default by
// construction of `required_len` above.
4755 let default = desired[i].default.unwrap();
4756 let default = default.subst_spanned(fcx.tcx(), substs, Some(span));
4757 substs.types.push(space, default);
4759 assert_eq!(substs.types.len(space), desired.len());
4761 debug!("Final substs: {:?}", substs);
// Region-space analogue of `adjust_type_parameters`: after explicit lifetimes
// were pushed, either supply fresh region inference variables (none given, or
// too few given — E0090) or accept an exact match. Regions have no defaults,
// so there is no top-up loop here.
// NOTE(review): this listing elides lines — the `fcx`/`span`/`space`
// parameters (original 4765-4767) and the `return`s separating the three
// cases are missing; confirm against the full source.
4764 fn adjust_region_parameters(
4768 defs: &VecPerParamSpace<ty::RegionParameterDef>,
4769 substs: &mut Substs)
4771 let provided_len = substs.mut_regions().len(space);
4772 let desired = defs.get_slice(space);
4774 // Enforced by `push_explicit_parameters_from_segment_to_substs()`.
4775 assert!(provided_len <= desired.len());
4777 // If nothing was provided, just use inference variables.
4778 if provided_len == 0 {
4779 substs.mut_regions().replace(
4781 fcx.infcx().region_vars_for_defs(span, desired));
4785 // If just the right number were provided, everybody is happy.
4786 if provided_len == desired.len() {
4790 // Otherwise, too few were provided. Report an error and then
4791 // use inference variables.
4792 span_err!(fcx.tcx().sess, span, E0090,
4793 "too few lifetime parameters provided: expected {} parameter{}, \
4794 found {} parameter{}",
4796 if desired.len() == 1 {""} else {"s"},
4798 if provided_len == 1 {""} else {"s"});
// Discard the partial list entirely; fresh variables for every def.
4800 substs.mut_regions().replace(
4802 fcx.infcx().region_vars_for_defs(span, desired));
// Resolves `ty` as far as current inference allows; if it is still an
// unresolved type variable, asks the fallback closure `f` for an alternative.
// If even the alternative is unknown (or already an error), emits the
// "type of this value must be known" diagnostic and unifies with `err`;
// otherwise unifies the variable with the alternative.
// NOTE(review): elided lines here include the fcx/sp/ty parameters (original
// 4807-4809), the `if ty.is_ty_var()` guard around the fallback path, and the
// final return of `ty`; confirm against the full source.
4806 fn structurally_resolve_type_or_else<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
4810 where F: Fn() -> Ty<'tcx>
4812 let mut ty = fcx.resolve_type_vars_if_possible(ty);
4815 let alternative = f();
// Fallback did not produce a usable concrete type either: hard error.
4818 if alternative.is_ty_var() || alternative.references_error() {
4819 fcx.type_error_message(sp, |_actual| {
4820 "the type of this value must be known in this context".to_string()
// Unify with `err` to suppress derived errors downstream.
4822 demand::suptype(fcx, sp, fcx.tcx().types.err, ty);
4823 ty = fcx.tcx().types.err;
// Fallback produced something concrete: pin the variable to it.
4825 demand::suptype(fcx, sp, alternative, ty);
4833 // Resolves `typ` by a single level if `typ` is a type variable. If no
4834 // resolution is possible, then an error is reported.
// Thin wrapper over `structurally_resolve_type_or_else` whose fallback
// closure (body elided in this listing, original 4841+) offers no
// alternative, so unresolved variables become the error path.
4835 pub fn structurally_resolved_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4840 structurally_resolve_type_or_else(fcx, sp, ty, || {
4845 // Returns true if b contains a break that can exit from b
4846 pub fn may_break(cx: &ty::ctxt, id: ast::NodeId, b: &hir::Block) -> bool {
4847 // First: is there an unlabeled break immediately
// NOTE(review): elided lines here include the rest of this comment, the
// catch-all match arm for `loop_query`, and the `||` joining the two queries
// (original 4848, 4852-4855); confirm against the full source.
4849 (loop_query(&*b, |e| {
4851 hir::ExprBreak(None) => true,
4855 // Second: is there a labeled break with label
4856 // <id> nested anywhere inside the loop?
4857 (block_query(b, |e| {
// A labeled break exits this block iff its label resolves to `id`.
4858 if let hir::ExprBreak(Some(_)) = e.node {
4859 lookup_full_def(cx, e.span, e.id) == def::DefLabel(id)
4866 pub fn check_bounds_are_used<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
4867 tps: &[hir::TyParam],
4869 debug!("check_bounds_are_used(n_tps={}, ty={:?})",
4872 // make a vector of booleans initially false, set to true when used
4873 if tps.is_empty() { return; }
4874 let mut tps_used = vec![false; tps.len()];
4876 for leaf_ty in ty.walk() {
4877 if let ty::TyParam(ParamTy {idx, ..}) = leaf_ty.sty {
4878 debug!("Found use of ty param num {}", idx);
4879 tps_used[idx as usize] = true;
4883 for (i, b) in tps_used.iter().enumerate() {
4885 span_err!(ccx.tcx.sess, tps[i].span, E0091,
4886 "type parameter `{}` is unused",