1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
15 Within the check phase of type check, we check each item one at a time
16 (bodies of function expressions are checked as part of the containing
17 function). Inference is used to supply types wherever they are unknown.
20 By far the most complex case is checking the body of a function. This
21 can be broken down into several distinct phases:
23 - gather: creates type variables to represent the type of each local
24 variable and pattern binding.
26 - main: the main pass does the lion's share of the work: it
27 determines the types of all expressions, resolves
28 methods, checks for most invalid conditions, and so forth. In
29 some cases, where a type is unknown, it may create a type or region
30 variable and use that as the type of an expression.
32 In the process of checking, various constraints will be placed on
33 these type variables through the subtyping relationships requested
34 through the `demand` module. The `infer` module is in charge
35 of resolving those constraints.
37 - regionck: after main is complete, the regionck pass goes over all
38 types looking for regions and making sure that they did not escape
39 into places they are not in scope. This may also influence the
40 final assignments of the various region variables if there is some flexibility.
43 - vtable: finds and records the impls to use for each trait bound that
44 appears on a type parameter.
46 - writeback: writes the final types within a function body, replacing
47 type variables with their final inferred types. These final types
48 are written into the `tcx.node_types` table, which should *never* contain
49 any reference to a type variable.
53 While type checking a function, the intermediate types for the
54 expressions, blocks, and so forth contained within the function are
55 stored in `fcx.node_types` and `fcx.item_substs`. These types
56 may contain unresolved type variables. After type checking is
57 complete, the functions in the writeback module are used to take the
58 types from this table, resolve them, and then write them into their
59 permanent home in the type context `ccx.tcx`.
61 This means that during inferencing you should use `fcx.write_ty()`
62 and `fcx.expr_ty()` / `fcx.node_ty()` to write/obtain the types of
63 nodes within the function.
65 The types of top-level items, which never contain unbound type
66 variables, are stored directly into the `tcx` tables.
68 n.b.: A type variable is not the same thing as a type parameter. A
69 type variable is rather an "instance" of a type parameter: that is,
70 given a generic function `fn foo<T>(t: T)`: while checking the
71 function `foo`, the type `ty_param(0)` refers to the type `T`, which
72 is treated in abstract. When `foo()` is called, however, `T` will be
73 substituted for a fresh type variable `N`. This variable will
74 eventually be resolved to some concrete type (which might itself be a type parameter).
79 pub use self::Expectation::*;
80 pub use self::compare_method::{compare_impl_method, compare_const_impl};
81 use self::TupleArgumentsFlag::*;
83 use astconv::{self, ast_region_to_region, ast_ty_to_ty, AstConv, PathParamMode};
84 use check::_match::pat_ctxt;
85 use fmt_macros::{Parser, Piece, Position};
86 use middle::astconv_util::prohibit_type_params;
87 use middle::cstore::LOCAL_CRATE;
89 use middle::def_id::DefId;
91 use middle::infer::{TypeOrigin, type_variable};
92 use middle::pat_util::{self, pat_id_map};
93 use middle::privacy::{AllPublic, LastMod};
94 use middle::subst::{self, Subst, Substs, VecPerParamSpace, ParamSpace, TypeSpace};
95 use middle::traits::{self, report_fulfillment_errors};
96 use middle::ty::{FnSig, GenericPredicates, TypeScheme};
97 use middle::ty::{Disr, ParamTy, ParameterEnvironment};
98 use middle::ty::{LvaluePreference, NoPreference, PreferMutLvalue};
99 use middle::ty::{self, HasTypeFlags, RegionEscape, ToPolyTraitRef, Ty};
100 use middle::ty::{MethodCall, MethodCallee};
101 use middle::ty::adjustment;
102 use middle::ty::error::TypeError;
103 use middle::ty::fold::{TypeFolder, TypeFoldable};
104 use middle::ty::util::Representability;
105 use require_c_abi_if_variadic;
106 use rscope::{ElisionFailureInfo, RegionScope};
107 use session::Session;
108 use {CrateCtxt, lookup_full_def};
111 use util::common::{block_query, ErrorReported, indenter, loop_query};
112 use util::nodemap::{DefIdMap, FnvHashMap, NodeMap};
114 use std::cell::{Cell, Ref, RefCell};
115 use std::collections::{HashSet};
116 use std::mem::replace;
120 use syntax::attr::AttrMetaMethods;
121 use syntax::codemap::{self, Span, Spanned};
122 use syntax::owned_slice::OwnedSlice;
123 use syntax::parse::token::{self, InternedString};
125 use syntax::util::lev_distance::lev_distance;
127 use rustc_front::intravisit::{self, Visitor};
128 use rustc_front::hir;
129 use rustc_front::hir::Visibility;
130 use rustc_front::hir::{Item, ItemImpl};
131 use rustc_front::print::pprust;
132 use rustc_back::slice;
152 /// closures defined within the function. For example:
155 /// bar(move|| { ... })
158 /// Here, the function `foo()` and the closure passed to
159 /// `bar()` will each have their own `FnCtxt`, but they will
160 /// share the inherited fields.
// Fields shared between a function and the closures defined inside it
// (see the doc comment above). NOTE(review): this excerpt elides some
// original lines (the embedded line numbers jump), so additional fields
// and the closing brace are not visible here.
161 pub struct Inherited<'a, 'tcx: 'a> {
// The shared type-inference context used for the whole function body.
162 infcx: infer::InferCtxt<'a, 'tcx>,
// Types of local bindings, keyed by AST node id.
163 locals: RefCell<NodeMap<Ty<'tcx>>>,
// Side tables (node types, adjustments, etc.) being filled in during checking.
165 tables: &'a RefCell<ty::Tables<'tcx>>,
167 // When we process a call like `c()` where `c` is a closure type,
168 // we may not have decided yet whether `c` is a `Fn`, `FnMut`, or
169 // `FnOnce` closure. In that case, we defer full resolution of the
170 // call until upvar inference can kick in and make the
171 // decision. We keep these deferred resolutions grouped by the
172 // def-id of the closure, so that once we decide, we can easily go
173 // back and process them.
174 deferred_call_resolutions: RefCell<DefIdMap<Vec<DeferredCallResolutionHandler<'tcx>>>>,
// Cast checks deferred until more type information is available.
176 deferred_cast_checks: RefCell<Vec<cast::CastCheck<'tcx>>>,
// A unit of deferred work run once closure-kind inference has made its
// decision (see `deferred_call_resolutions` in `Inherited`).
179 trait DeferredCallResolution<'tcx> {
180 fn resolve<'a>(&mut self, fcx: &FnCtxt<'a,'tcx>);
// Boxed handler stored per closure def-id until resolution can proceed.
183 type DeferredCallResolutionHandler<'tcx> = Box<DeferredCallResolution<'tcx>+'tcx>;
185 /// When type-checking an expression, we propagate downward
186 /// whatever type hint we are able in the form of an `Expectation`.
187 #[derive(Copy, Clone, Debug)]
188 pub enum Expectation<'tcx> {
189 /// We know nothing about what type this expression should have.
// NOTE(review): the `NoExpectation` variant itself (original line 190)
// is elided from this excerpt.
192 /// This expression should have the type given (or some subtype)
193 ExpectHasType(Ty<'tcx>),
195 /// This expression will be cast to the `Ty`
196 ExpectCastableToType(Ty<'tcx>),
198 /// This rvalue expression will be wrapped in `&` or `Box` and coerced
199 /// to `&Ty` or `Box<Ty>`, respectively. `Ty` is `[A]` or `Trait`.
200 ExpectRvalueLikeUnsized(Ty<'tcx>),
203 impl<'tcx> Expectation<'tcx> {
204 // Disregard "castable to" expectations because they
205 // can lead us astray. Consider for example `if cond
206 // {22} else {c} as u8` -- if we propagate the
207 // "castable to u8" constraint to 22, it will pick the
208 // type 22u8, which is overly constrained (c might not
209 // be a u8). In effect, the problem is that the
210 // "castable to" expectation is not the tightest thing
211 // we can say, so we want to drop it in this case.
212 // The tightest thing we can say is "must unify with
213 // else branch". Note that in the case of a "has type"
214 // constraint, this limitation does not hold.
216 // If the expected type is just a type variable, then don't use
217 // an expected type. Otherwise, we might write parts of the type
218 // when checking the 'then' block which are incompatible with the
// Weakens `self` into an expectation that is safe to apply to each
// branch of a conditional, per the discussion above.
220 fn adjust_for_branches<'a>(&self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> {
222 ExpectHasType(ety) => {
// Resolve the expected type one level, so we can tell whether it
// is still an unresolved inference variable.
223 let ety = fcx.infcx().shallow_resolve(ety);
224 if !ety.is_ty_var() {
230 ExpectRvalueLikeUnsized(ety) => {
231 ExpectRvalueLikeUnsized(ety)
// Tracks whether the code currently being checked is inside an `unsafe`
// context, and how that context was entered.
238 #[derive(Copy, Clone)]
239 pub struct UnsafetyState {
// Node id of the fn or block that established the current unsafety.
240 pub def: ast::NodeId,
241 pub unsafety: hir::Unsafety,
// Depth of compiler-generated push/pop-unsafe blocks.
242 pub unsafe_push_count: u32,
// NOTE(review): the `from_fn` field and the `impl UnsafetyState` header
// are elided from this excerpt; `function` below is presumably a method
// of that impl.
// Constructs the state for the top of a function body.
247 pub fn function(unsafety: hir::Unsafety, def: ast::NodeId) -> UnsafetyState {
248 UnsafetyState { def: def, unsafety: unsafety, unsafe_push_count: 0, from_fn: true }
// Computes the unsafety state in effect for a nested block `blk`,
// based on the current state and the block's rules.
251 pub fn recurse(&mut self, blk: &hir::Block) -> UnsafetyState {
252 match self.unsafety {
253 // If this is unsafe, then if the outer function was already marked as
254 // unsafe we shouldn't attribute the unsafe'ness to the block. This
255 // way the block can be warned about instead of ignoring this
256 // extraneous block (functions are never warned about).
257 hir::Unsafety::Unsafe if self.from_fn => *self,
260 let (unsafety, def, count) = match blk.rules {
261 hir::PushUnsafeBlock(..) =>
// `checked_add`/`checked_sub` panic on over/underflow: an unbalanced
// push/pop pair would be a compiler bug.
262 (unsafety, blk.id, self.unsafe_push_count.checked_add(1).unwrap()),
263 hir::PopUnsafeBlock(..) =>
264 (unsafety, blk.id, self.unsafe_push_count.checked_sub(1).unwrap()),
265 hir::UnsafeBlock(..) =>
266 (hir::Unsafety::Unsafe, blk.id, self.unsafe_push_count),
267 hir::DefaultBlock | hir::PushUnstableBlock | hir:: PopUnstableBlock =>
268 (unsafety, self.def, self.unsafe_push_count),
270 UnsafetyState{ def: def,
272 unsafe_push_count: count,
// Per-function type-checking context; wraps the shared `Inherited` data.
280 pub struct FnCtxt<'a, 'tcx: 'a> {
// Node id of the body being checked.
281 body_id: ast::NodeId,
283 // This flag is set to true if, during the writeback phase, we encounter
284 // a type error in this function.
285 writeback_errors: Cell<bool>,
287 // Number of errors that had been reported when we started
288 // checking this function. On exit, if we find that *more* errors
289 // have been reported, we will skip regionck and other work that
290 // expects the types within the function to be consistent.
291 err_count_on_creation: usize,
// Declared return type of the function being checked.
293 ret_ty: ty::FnOutput<'tcx>,
// Current unsafety state (updated as blocks are entered/left).
295 ps: RefCell<UnsafetyState>,
// Fields shared with any nested closures.
297 inh: &'a Inherited<'a, 'tcx>,
// Crate-wide type-checking context.
299 ccx: &'a CrateCtxt<'a, 'tcx>,
302 impl<'a, 'tcx> Inherited<'a, 'tcx> {
// Builds the shared context for checking one function body: a fresh
// inference context plus empty side tables.
303 fn new(tcx: &'a ty::ctxt<'tcx>,
304 tables: &'a RefCell<ty::Tables<'tcx>>,
305 param_env: ty::ParameterEnvironment<'a, 'tcx>)
306 -> Inherited<'a, 'tcx> {
309 infcx: infer::new_infer_ctxt(tcx, tables, Some(param_env), true),
310 locals: RefCell::new(NodeMap()),
312 deferred_call_resolutions: RefCell::new(DefIdMap()),
313 deferred_cast_checks: RefCell::new(Vec::new()),
// Normalizes associated-type projections in `T`, registering any
// resulting obligations with the shared fulfillment context.
317 fn normalize_associated_types_in<T>(&self,
319 body_id: ast::NodeId,
322 where T : TypeFoldable<'tcx> + HasTypeFlags
324 let mut fulfillment_cx = self.infcx.fulfillment_cx.borrow_mut();
325 assoc::normalize_associated_types_in(&self.infcx,
334 // Used by check_const and check_enum_variants
// Builds a minimal `FnCtxt` (no real enclosing fn) with the given
// return type and body id, starting from a normal (non-unsafe) state.
335 pub fn blank_fn_ctxt<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
336 inh: &'a Inherited<'a, 'tcx>,
337 rty: ty::FnOutput<'tcx>,
338 body_id: ast::NodeId)
339 -> FnCtxt<'a, 'tcx> {
342 writeback_errors: Cell::new(false),
343 err_count_on_creation: ccx.tcx.sess.err_count(),
345 ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal, 0)),
// Builds an `Inherited` for checking items outside any function (e.g.
// statics/consts) using an empty parameter environment, since no
// generics are in scope there.
351 fn static_inherited_fields<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
352 tables: &'a RefCell<ty::Tables<'tcx>>)
353 -> Inherited<'a, 'tcx> {
354 // It's kind of a kludge to manufacture a fake function context
355 // and statement context, but we might as well write the code only once
356 let param_env = ccx.tcx.empty_parameter_environment();
357 Inherited::new(ccx.tcx, &tables, param_env)
// AST visitors driving the two checking passes below: one checks item
// signatures/types, the other checks item bodies.
360 struct CheckItemTypesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
361 struct CheckItemBodiesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
363 impl<'a, 'tcx> Visitor<'tcx> for CheckItemTypesVisitor<'a, 'tcx> {
// Checks each item's type-level properties, then recurses into it.
364 fn visit_item(&mut self, i: &'tcx hir::Item) {
365 check_item_type(self.ccx, i);
366 intravisit::walk_item(self, i);
// Fixed-length array types embed a length expression; check that it
// is a valid `usize` constant.
369 fn visit_ty(&mut self, t: &'tcx hir::Ty) {
371 hir::TyFixedLengthVec(_, ref expr) => {
372 check_const_in_type(self.ccx, &**expr, self.ccx.tcx.types.usize);
377 intravisit::walk_ty(self, t);
381 impl<'a, 'tcx> Visitor<'tcx> for CheckItemBodiesVisitor<'a, 'tcx> {
// Checks the body (function/method bodies, const exprs) of each item.
382 fn visit_item(&mut self, i: &'tcx hir::Item) {
383 check_item_body(self.ccx, i);
// Runs the old well-formedness checker over all items, aborting
// compilation if it reports any errors.
387 pub fn check_wf_old(ccx: &CrateCtxt) {
388 // If types are not well-formed, it leads to all manner of errors
389 // downstream, so stop reporting errors at this point.
390 ccx.tcx.sess.abort_if_new_errors(|| {
391 // FIXME(#25759). The new code below is much more reliable but (for now)
392 // only generates warnings. So as to ensure that we continue
393 // getting errors where we used to get errors, we run the old wf
394 // code first and abort if it encounters any errors. If no abort
395 // comes, we run the new code and issue warnings.
396 let krate = ccx.tcx.map.krate();
397 let mut visit = wf::CheckTypeWellFormedVisitor::new(ccx);
398 krate.visit_all_items(&mut visit);
// Runs the new well-formedness checker (see the FIXME in check_wf_old:
// at this stage it mostly issues warnings rather than errors).
402 pub fn check_wf_new(ccx: &CrateCtxt) {
403 ccx.tcx.sess.abort_if_new_errors(|| {
404 let krate = ccx.tcx.map.krate();
405 let mut visit = wfcheck::CheckTypeWellFormedVisitor::new(ccx);
406 krate.visit_all_items(&mut visit);
// Type-checks the signatures/types of all items in the crate,
// aborting if new errors are produced.
410 pub fn check_item_types(ccx: &CrateCtxt) {
411 ccx.tcx.sess.abort_if_new_errors(|| {
412 let krate = ccx.tcx.map.krate();
413 let mut visit = CheckItemTypesVisitor { ccx: ccx };
414 krate.visit_all_items(&mut visit);
// Type-checks the bodies of all items in the crate, aborting if new
// errors are produced.
418 pub fn check_item_bodies(ccx: &CrateCtxt) {
419 ccx.tcx.sess.abort_if_new_errors(|| {
420 let krate = ccx.tcx.map.krate();
421 let mut visit = CheckItemBodiesVisitor { ccx: ccx };
422 krate.visit_all_items(&mut visit);
// Validates every local `Drop` impl via dropck. If the crate has no
// `Drop` lang item at all, there is nothing to check.
426 pub fn check_drop_impls(ccx: &CrateCtxt) {
427 ccx.tcx.sess.abort_if_new_errors(|| {
428 let drop_trait = match ccx.tcx.lang_items.drop_trait() {
429 Some(id) => ccx.tcx.lookup_trait_def(id), None => { return }
431 drop_trait.for_each_impl(ccx.tcx, |drop_impl_did| {
432 if drop_impl_did.is_local() {
433 match dropck::check_drop_impl(ccx.tcx, drop_impl_did) {
// A dropck failure must already have been reported as an error.
436 assert!(ccx.tcx.sess.has_errors());
// Top-level driver for checking a single fn item or method body: sets up
// the inference context, runs the main checking pass, then the follow-up
// passes (upvar/closure analysis, regionck, writeback).
444 fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
445 decl: &'tcx hir::FnDecl,
446 body: &'tcx hir::Block,
450 param_env: ty::ParameterEnvironment<'a, 'tcx>)
453 ty::TyBareFn(_, ref fn_ty) => {
454 let tables = RefCell::new(ty::Tables::empty());
455 let inh = Inherited::new(ccx.tcx, &tables, param_env);
457 // Compute the fty from point of view of inside fn.
458 let fn_scope = ccx.tcx.region_maps.item_extent(body.id);
460 fn_ty.sig.subst(ccx.tcx, &inh.infcx.parameter_environment.free_substs);
// Replace late-bound regions with free regions scoped to the body.
462 ccx.tcx.liberate_late_bound_regions(fn_scope, &fn_sig);
464 inh.normalize_associated_types_in(body.span,
468 let fcx = check_fn(ccx, fn_ty.unsafety, fn_id, &fn_sig,
469 decl, fn_id, body, &inh);
471 fcx.select_all_obligations_and_apply_defaults();
472 upvar::closure_analyze_fn(&fcx, fn_id, decl, body);
473 fcx.select_obligations_where_possible();
475 fcx.select_all_obligations_or_error(); // Casts can introduce new obligations.
477 regionck::regionck_fn(&fcx, fn_id, fn_span, decl, body);
478 writeback::resolve_type_vars_in_fn(&fcx, decl, body);
// Only fn items should ever reach this function.
480 _ => ccx.tcx.sess.impossible_case(body.span,
481 "check_bare_fn: function type expected")
// Gather pass: walks a function body assigning an initial type (declared
// or fresh inference variable) to each local and pattern binding.
485 struct GatherLocalsVisitor<'a, 'tcx: 'a> {
486 fcx: &'a FnCtxt<'a, 'tcx>
489 impl<'a, 'tcx> GatherLocalsVisitor<'a, 'tcx> {
// Records a type for node `nid`: the declared type if present,
// otherwise a fresh type variable. Returns the recorded type.
490 fn assign(&mut self, _span: Span, nid: ast::NodeId, ty_opt: Option<Ty<'tcx>>) -> Ty<'tcx> {
493 // infer the variable's type
494 let var_ty = self.fcx.infcx().next_ty_var();
495 self.fcx.inh.locals.borrow_mut().insert(nid, var_ty);
499 // take type that the user specified
500 self.fcx.inh.locals.borrow_mut().insert(nid, typ);
507 impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> {
508 // Add explicitly-declared locals.
509 fn visit_local(&mut self, local: &'tcx hir::Local) {
510 let o_ty = match local.ty {
511 Some(ref ty) => Some(self.fcx.to_ty(&**ty)),
514 self.assign(local.span, local.id, o_ty);
515 debug!("Local variable {:?} is assigned type {}",
517 self.fcx.infcx().ty_to_string(
518 self.fcx.inh.locals.borrow().get(&local.id).unwrap().clone()));
519 intravisit::walk_local(self, local);
522 // Add pattern bindings.
523 fn visit_pat(&mut self, p: &'tcx hir::Pat) {
524 if let hir::PatIdent(_, ref path1, _) = p.node {
525 if pat_util::pat_is_binding(&self.fcx.ccx.tcx.def_map.borrow(), p) {
// Bindings always get a fresh type variable; their type must be Sized.
526 let var_ty = self.assign(p.span, p.id, None);
528 self.fcx.require_type_is_sized(var_ty, p.span,
529 traits::VariableType(p.id));
531 debug!("Pattern binding {} is assigned to {} with type {:?}",
533 self.fcx.infcx().ty_to_string(
534 self.fcx.inh.locals.borrow().get(&p.id).unwrap().clone()),
538 intravisit::walk_pat(self, p);
541 fn visit_block(&mut self, b: &'tcx hir::Block) {
542 // non-obvious: the `blk` variable maps to region lb, so
543 // we have to keep this up-to-date. This
544 // is... unfortunate. It'd be nice to not need this.
545 intravisit::walk_block(self, b);
548 // Since an expr occurs as part of the type fixed size arrays we
549 // need to record the type for that node
550 fn visit_ty(&mut self, t: &'tcx hir::Ty) {
552 hir::TyFixedLengthVec(ref ty, ref count_expr) => {
553 self.visit_ty(&**ty);
// Array lengths are expressions of type usize.
554 check_expr_with_hint(self.fcx, &**count_expr, self.fcx.tcx().types.usize);
556 hir::TyBareFn(ref function_declaration) => {
557 intravisit::walk_fn_decl_nopat(self, &function_declaration.decl);
558 walk_list!(self, visit_lifetime_def, &function_declaration.lifetimes);
560 _ => intravisit::walk_ty(self, t)
564 // Don't descend into the bodies of nested closures
565 fn visit_fn(&mut self, _: intravisit::FnKind<'tcx>, _: &'tcx hir::FnDecl,
566 _: &'tcx hir::Block, _: Span, _: ast::NodeId) { }
569 /// Helper used by check_bare_fn and check_expr_fn. Does the grungy work of checking a function
570 /// body and returns the function context used for that purpose, since in the case of a fn item
571 /// there is still a bit more to do.
574 /// * inherited: other fields inherited from the enclosing fn (if any)
575 fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
576 unsafety: hir::Unsafety,
577 unsafety_id: ast::NodeId,
578 fn_sig: &ty::FnSig<'tcx>,
579 decl: &'tcx hir::FnDecl,
581 body: &'tcx hir::Block,
582 inherited: &'a Inherited<'a, 'tcx>)
586 let err_count_on_creation = tcx.sess.err_count();
588 let arg_tys = &fn_sig.inputs;
589 let ret_ty = fn_sig.output;
591 debug!("check_fn(arg_tys={:?}, ret_ty={:?}, fn_id={})",
596 // Create the function context. This is either derived from scratch or,
597 // in the case of function expressions, based on the outer context.
600 writeback_errors: Cell::new(false),
601 err_count_on_creation: err_count_on_creation,
603 ps: RefCell::new(UnsafetyState::function(unsafety, unsafety_id)),
// A converging function must return a Sized value.
608 if let ty::FnConverging(ret_ty) = ret_ty {
609 fcx.require_type_is_sized(ret_ty, decl.output.span(), traits::ReturnType);
612 debug!("fn-sig-map: fn_id={} fn_sig={:?}", fn_id, fn_sig);
614 inherited.tables.borrow_mut().liberated_fn_sigs.insert(fn_id, fn_sig.clone());
// Gather pass: assign initial types to locals and pattern bindings.
617 let mut visit = GatherLocalsVisitor { fcx: &fcx, };
619 // Add formal parameters.
620 for (arg_ty, input) in arg_tys.iter().zip(&decl.inputs) {
621 // The type of the argument must be well-formed.
623 // NB -- this is now checked in wfcheck, but that
624 // currently only results in warnings, so we issue an
625 // old-style WF obligation here so that we still get the
626 // errors that we used to get.
627 fcx.register_old_wf_obligation(arg_ty, input.ty.span, traits::MiscObligation);
629 // Create type variables for each argument.
630 pat_util::pat_bindings(
633 |_bm, pat_id, sp, _path| {
634 let var_ty = visit.assign(sp, pat_id, None);
635 fcx.require_type_is_sized(var_ty, sp,
636 traits::VariableType(pat_id));
639 // Check the pattern.
642 map: pat_id_map(&tcx.def_map, &*input.pat),
644 _match::check_pat(&pcx, &*input.pat, *arg_ty);
647 visit.visit_block(body);
// Main pass: check the body against the declared return type.
650 check_block_with_expected(&fcx, body, match ret_ty {
651 ty::FnConverging(result_type) => ExpectHasType(result_type),
652 ty::FnDiverging => NoExpectation
// Record the final argument types for each input node.
655 for (input, arg) in decl.inputs.iter().zip(arg_tys) {
656 fcx.write_ty(input.id, arg);
// Checks struct-level properties: representability (no infinitely-sized
// recursion) and, for #[simd] structs, SIMD validity.
662 pub fn check_struct(ccx: &CrateCtxt, id: ast::NodeId, span: Span) {
665 check_representable(tcx, span, id, "struct");
667 if tcx.lookup_simd(ccx.tcx.map.local_def_id(id)) {
668 check_simd(tcx, span, id);
// Checks the type-level properties of one item (signatures, variants,
// impl/trait conformance); bodies are handled by check_item_body.
672 pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
673 debug!("check_item_type(it.id={}, it.name={})",
675 ccx.tcx.item_path_str(ccx.tcx.map.local_def_id(it.id)));
676 let _indenter = indenter();
678 // Consts can play a role in type-checking, so they are included here.
679 hir::ItemStatic(_, _, ref e) |
680 hir::ItemConst(_, ref e) => check_const(ccx, it.span, &**e, it.id),
681 hir::ItemEnum(ref enum_definition, _) => {
682 check_enum_variants(ccx,
684 &enum_definition.variants,
687 hir::ItemFn(..) => {} // entirely within check_item_body
688 hir::ItemImpl(_, _, _, _, _, ref impl_items) => {
689 debug!("ItemImpl {} with id {}", it.name, it.id);
// Trait impls must be checked against the trait's item list.
690 match ccx.tcx.impl_trait_ref(ccx.tcx.map.local_def_id(it.id)) {
691 Some(impl_trait_ref) => {
692 check_impl_items_against_trait(ccx,
700 hir::ItemTrait(_, ref generics, _, _) => {
701 check_trait_on_unimplemented(ccx, generics, it);
703 hir::ItemStruct(..) => {
704 check_struct(ccx, it.id, it.span);
706 hir::ItemTy(_, ref generics) => {
707 let pty_ty = ccx.tcx.node_id_to_type(it.id);
708 check_bounds_are_used(ccx, &generics.ty_params, pty_ty);
710 hir::ItemForeignMod(ref m) => {
// Intrinsics have their signatures validated specially.
711 if m.abi == abi::RustIntrinsic {
712 for item in &m.items {
713 intrinsic::check_intrinsic_type(ccx, item);
715 } else if m.abi == abi::PlatformIntrinsic {
716 for item in &m.items {
717 intrinsic::check_platform_intrinsic_type(ccx, item);
720 for item in &m.items {
721 let pty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(item.id));
// Foreign items cannot be generic (E0044).
722 if !pty.generics.types.is_empty() {
723 span_err!(ccx.tcx.sess, item.span, E0044,
724 "foreign items may not have type parameters");
725 span_help!(ccx.tcx.sess, item.span,
726 "consider using specialization instead of \
730 if let hir::ForeignItemFn(ref fn_decl, _) = item.node {
731 require_c_abi_if_variadic(ccx.tcx, fn_decl, m.abi, item.span);
736 _ => {/* nothing to do */ }
// Checks the executable bodies within one item: free fns, impl items,
// and trait items with provided bodies/values.
740 pub fn check_item_body<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
741 debug!("check_item_body(it.id={}, it.name={})",
743 ccx.tcx.item_path_str(ccx.tcx.map.local_def_id(it.id)));
744 let _indenter = indenter();
746 hir::ItemFn(ref decl, _, _, _, _, ref body) => {
747 let fn_pty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(it.id));
748 let param_env = ParameterEnvironment::for_item(ccx.tcx, it.id);
749 check_bare_fn(ccx, &**decl, &**body, it.id, it.span, fn_pty.ty, param_env);
751 hir::ItemImpl(_, _, _, _, _, ref impl_items) => {
752 debug!("ItemImpl {} with id {}", it.name, it.id);
754 let impl_pty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(it.id));
756 for impl_item in impl_items {
757 match impl_item.node {
758 hir::ImplItemKind::Const(_, ref expr) => {
759 check_const(ccx, impl_item.span, &*expr, impl_item.id)
761 hir::ImplItemKind::Method(ref sig, ref body) => {
762 check_method_body(ccx, &impl_pty.generics, sig, body,
763 impl_item.id, impl_item.span);
765 hir::ImplItemKind::Type(_) => {
766 // Nothing to do here.
771 hir::ItemTrait(_, _, _, ref trait_items) => {
772 let trait_def = ccx.tcx.lookup_trait_def(ccx.tcx.map.local_def_id(it.id));
773 for trait_item in trait_items {
774 match trait_item.node {
775 hir::ConstTraitItem(_, Some(ref expr)) => {
776 check_const(ccx, trait_item.span, &*expr, trait_item.id)
778 hir::MethodTraitItem(ref sig, Some(ref body)) => {
// Default (provided) methods also have their bodies checked.
779 check_trait_fn_not_const(ccx, trait_item.span, sig.constness);
781 check_method_body(ccx, &trait_def.generics, sig, body,
782 trait_item.id, trait_item.span);
784 hir::MethodTraitItem(ref sig, None) => {
785 check_trait_fn_not_const(ccx, trait_item.span, sig.constness);
787 hir::ConstTraitItem(_, None) |
788 hir::TypeTraitItem(..) => {
794 _ => {/* nothing to do */ }
// Emits E0379 if a trait method is declared `const` (not allowed).
798 fn check_trait_fn_not_const<'a,'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
800 constness: hir::Constness)
803 hir::Constness::NotConst => {
806 hir::Constness::Const => {
807 span_err!(ccx.tcx.sess, span, E0379, "trait fns cannot be declared const");
// Validates a trait's #[rustc_on_unimplemented] attribute: its format
// string may only reference `{Self}` or named type parameters of the
// trait (E0230/E0231), and the attribute must carry a value (E0232).
812 fn check_trait_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
813 generics: &hir::Generics,
815 if let Some(ref attr) = item.attrs.iter().find(|a| {
816 a.check_name("rustc_on_unimplemented")
818 if let Some(ref istring) = attr.value_str() {
// Reuses the format-string parser from fmt_macros.
819 let parser = Parser::new(&istring);
820 let types = &*generics.ty_params;
821 for token in parser {
823 Piece::String(_) => (), // Normal string, no need to check it
824 Piece::NextArgument(a) => match a.position {
825 // `{Self}` is allowed
826 Position::ArgumentNamed(s) if s == "Self" => (),
827 // So is `{A}` if A is a type parameter
828 Position::ArgumentNamed(s) => match types.iter().find(|t| {
833 span_err!(ccx.tcx.sess, attr.span, E0230,
834 "there is no type parameter \
839 // `{:1}` and `{}` are not to be used
840 Position::ArgumentIs(_) | Position::ArgumentNext => {
841 span_err!(ccx.tcx.sess, attr.span, E0231,
842 "only named substitution \
843 parameters are allowed");
849 span_err!(ccx.tcx.sess, attr.span, E0232,
850 "this attribute must have a value, \
851 eg `#[rustc_on_unimplemented = \"foo\"]`")
856 /// Type checks a method body.
860 /// * `item_generics`: generics defined on the impl/trait that contains
862 /// * `self_bound`: bound for the `Self` type parameter, if any
863 /// * `method`: the method definition
864 fn check_method_body<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
865 item_generics: &ty::Generics<'tcx>,
866 sig: &'tcx hir::MethodSig,
867 body: &'tcx hir::Block,
868 id: ast::NodeId, span: Span) {
869 debug!("check_method_body(item_generics={:?}, id={})",
// Builds the parameter environment for the method, then defers to the
// common fn-checking path.
871 let param_env = ParameterEnvironment::for_item(ccx.tcx, id);
873 let fty = ccx.tcx.node_id_to_type(id);
874 debug!("check_method_body: fty={:?}", fty);
876 check_bare_fn(ccx, &sig.decl, body, id, span, fty, param_env);
// Verifies that the items of a trait impl match the trait declaration:
// each impl item must correspond to a trait item of the same kind
// (E0323/E0324/E0325), and every required trait item must be provided
// (E0046). Overriding an associated type invalidates provided defaults
// that may depend on it (E0399).
879 fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
881 impl_trait_ref: &ty::TraitRef<'tcx>,
882 impl_items: &[hir::ImplItem]) {
883 // Locate trait methods
885 let trait_items = tcx.trait_items(impl_trait_ref.def_id);
886 let mut overridden_associated_type = None;
888 // Check existing impl methods to see if they are both present in trait
889 // and compatible with trait signature
890 for impl_item in impl_items {
891 let ty_impl_item = ccx.tcx.impl_or_trait_item(ccx.tcx.map.local_def_id(impl_item.id));
// Match up the impl item with the trait item of the same name.
892 let ty_trait_item = trait_items.iter()
893 .find(|ac| ac.name() == ty_impl_item.name());
895 if let Some(ty_trait_item) = ty_trait_item {
896 match impl_item.node {
897 hir::ImplItemKind::Const(..) => {
898 let impl_const = match ty_impl_item {
899 ty::ConstTraitItem(ref cti) => cti,
900 _ => tcx.sess.span_bug(impl_item.span, "non-const impl-item for const")
903 // Find associated const definition.
904 if let &ty::ConstTraitItem(ref trait_const) = ty_trait_item {
905 compare_const_impl(ccx.tcx,
911 span_err!(tcx.sess, impl_item.span, E0323,
912 "item `{}` is an associated const, \
913 which doesn't match its trait `{:?}`",
918 hir::ImplItemKind::Method(ref sig, ref body) => {
919 check_trait_fn_not_const(ccx, impl_item.span, sig.constness);
921 let impl_method = match ty_impl_item {
922 ty::MethodTraitItem(ref mti) => mti,
923 _ => tcx.sess.span_bug(impl_item.span, "non-method impl-item for method")
926 if let &ty::MethodTraitItem(ref trait_method) = ty_trait_item {
927 compare_impl_method(ccx.tcx,
934 span_err!(tcx.sess, impl_item.span, E0324,
935 "item `{}` is an associated method, \
936 which doesn't match its trait `{:?}`",
941 hir::ImplItemKind::Type(_) => {
942 let impl_type = match ty_impl_item {
943 ty::TypeTraitItem(ref tti) => tti,
944 _ => tcx.sess.span_bug(impl_item.span, "non-type impl-item for type")
947 if let &ty::TypeTraitItem(ref at) = ty_trait_item {
// Remember if the impl overrides a defaulted associated type;
// this invalidates provided items below (E0399).
948 if let Some(_) = at.ty {
949 overridden_associated_type = Some(impl_item);
952 span_err!(tcx.sess, impl_item.span, E0325,
953 "item `{}` is an associated type, \
954 which doesn't match its trait `{:?}`",
963 // Check for missing items from trait
964 let provided_methods = tcx.provided_trait_methods(impl_trait_ref.def_id);
965 let mut missing_items = Vec::new();
966 let mut invalidated_items = Vec::new();
967 let associated_type_overridden = overridden_associated_type.is_some();
968 for trait_item in trait_items.iter() {
970 ty::ConstTraitItem(ref associated_const) => {
971 let is_implemented = impl_items.iter().any(|ii| {
973 hir::ImplItemKind::Const(..) => {
974 ii.name == associated_const.name
979 let is_provided = associated_const.has_value;
983 missing_items.push(associated_const.name);
984 } else if associated_type_overridden {
985 invalidated_items.push(associated_const.name);
989 ty::MethodTraitItem(ref trait_method) => {
991 impl_items.iter().any(|ii| {
993 hir::ImplItemKind::Method(..) => {
994 ii.name == trait_method.name
1000 provided_methods.iter().any(|m| m.name == trait_method.name);
1001 if !is_implemented {
1003 missing_items.push(trait_method.name);
1004 } else if associated_type_overridden {
1005 invalidated_items.push(trait_method.name);
1009 ty::TypeTraitItem(ref associated_type) => {
1010 let is_implemented = impl_items.iter().any(|ii| {
1012 hir::ImplItemKind::Type(_) => {
1013 ii.name == associated_type.name
1018 let is_provided = associated_type.ty.is_some();
1019 if !is_implemented {
1021 missing_items.push(associated_type.name);
1022 } else if associated_type_overridden {
1023 invalidated_items.push(associated_type.name);
1030 if !missing_items.is_empty() {
1031 span_err!(tcx.sess, impl_span, E0046,
1032 "not all trait items implemented, missing: `{}`",
1033 missing_items.iter()
1034 .map(|name| name.to_string())
1035 .collect::<Vec<_>>().join("`, `"))
1038 if !invalidated_items.is_empty() {
1039 let invalidator = overridden_associated_type.unwrap();
1040 span_err!(tcx.sess, invalidator.span, E0399,
1041 "the following trait items need to be reimplemented \
1042 as `{}` was overridden: `{}`",
1044 invalidated_items.iter()
1045 .map(|name| name.to_string())
1046 .collect::<Vec<_>>().join("`, `"))
// Reports an error for `expr as UnsizedType`, suggesting a reference
// (`&T`) or `Box<T>` cast where the source snippet allows it, and
// records an error type for the cast expression's node.
1050 fn report_cast_to_unsized_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
1057 let tstr = fcx.infcx().ty_to_string(t_cast);
1058 fcx.type_error_message(span, |actual| {
1059 format!("cast to unsized type: `{}` as `{}`", actual, tstr)
1062 ty::TyRef(_, ty::TypeAndMut { mutbl: mt, .. }) => {
1063 let mtstr = match mt {
1064 hir::MutMutable => "mut ",
1065 hir::MutImmutable => ""
1067 if t_cast.is_trait() {
// Prefer a machine-applicable suggestion when we can recover the
// source snippet; otherwise fall back to a help note.
1068 match fcx.tcx().sess.codemap().span_to_snippet(t_span) {
1070 fcx.tcx().sess.span_suggestion(t_span,
1071 "try casting to a reference instead:",
1072 format!("&{}{}", mtstr, s));
1075 span_help!(fcx.tcx().sess, t_span,
1076 "did you mean `&{}{}`?", mtstr, tstr),
1079 span_help!(fcx.tcx().sess, span,
1080 "consider using an implicit coercion to `&{}{}` instead",
1085 match fcx.tcx().sess.codemap().span_to_snippet(t_span) {
1087 fcx.tcx().sess.span_suggestion(t_span,
1088 "try casting to a `Box` instead:",
1089 format!("Box<{}>", s));
1092 span_help!(fcx.tcx().sess, t_span, "did you mean `Box<{}>`?", tstr),
1096 span_help!(fcx.tcx().sess, e_span,
1097 "consider using a box or reference as appropriate");
// Mark the node as erroneous so downstream passes don't re-report.
1100 fcx.write_error(id);
// Implements the AST-to-type conversion interface for FnCtxt, so that
// type ascriptions inside a body are converted in the context of the
// function's inference state and parameter environment.
1104 impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> {
1105 fn tcx(&self) -> &ty::ctxt<'tcx> { self.ccx.tcx }
1107 fn get_item_type_scheme(&self, _: Span, id: DefId)
1108 -> Result<ty::TypeScheme<'tcx>, ErrorReported>
1110 Ok(self.tcx().lookup_item_type(id))
1113 fn get_trait_def(&self, _: Span, id: DefId)
1114 -> Result<&'tcx ty::TraitDef<'tcx>, ErrorReported>
1116 Ok(self.tcx().lookup_trait_def(id))
1119 fn ensure_super_predicates(&self, _: Span, _: DefId) -> Result<(), ErrorReported> {
1120 // all super predicates are ensured during collect pass
1124 fn get_free_substs(&self) -> Option<&Substs<'tcx>> {
1125 Some(&self.inh.infcx.parameter_environment.free_substs)
// Collects trait bounds on the given type parameter from the caller
// bounds recorded in the parameter environment.
1128 fn get_type_parameter_bounds(&self,
1130 node_id: ast::NodeId)
1131 -> Result<Vec<ty::PolyTraitRef<'tcx>>, ErrorReported>
1133 let def = self.tcx().type_parameter_def(node_id);
1134 let r = self.inh.infcx.parameter_environment
1137 .filter_map(|predicate| {
1139 ty::Predicate::Trait(ref data) => {
1140 if data.0.self_ty().is_param(def.space, def.index) {
1141 Some(data.to_poly_trait_ref())
1155 fn trait_defines_associated_type_named(&self,
1156 trait_def_id: DefId,
1157 assoc_name: ast::Name)
1160 let trait_def = self.ccx.tcx.lookup_trait_def(trait_def_id);
1161 trait_def.associated_type_names.contains(&assoc_name)
// Creates a fresh type variable for an elided type parameter, seeding
// it with the parameter's (substituted) default if one exists.
1165 ty_param_def: Option<ty::TypeParameterDef<'tcx>>,
1166 substs: Option<&mut subst::Substs<'tcx>>,
1167 space: Option<subst::ParamSpace>,
1168 span: Span) -> Ty<'tcx> {
1169 // Grab the default doing subsitution
1170 let default = ty_param_def.and_then(|def| {
1171 def.default.map(|ty| type_variable::Default {
1172 ty: ty.subst_spanned(self.tcx(), substs.as_ref().unwrap(), Some(span)),
1174 def_id: def.default_def_id
1178 let ty_var = self.infcx().next_ty_var_with_default(default);
1180 // Finally we add the type variable to the substs
1183 Some(substs) => { substs.types.push(space.unwrap(), ty_var); ty_var }
// Projections through a poly trait ref: replace late-bound regions
// with fresh variables, then normalize the associated type.
1187 fn projected_ty_from_poly_trait_ref(&self,
1189 poly_trait_ref: ty::PolyTraitRef<'tcx>,
1190 item_name: ast::Name)
1193 let (trait_ref, _) =
1194 self.infcx().replace_late_bound_regions_with_fresh_var(
1196 infer::LateBoundRegionConversionTime::AssocTypeProjection(item_name),
1199 self.normalize_associated_type(span, trait_ref, item_name)
1202 fn projected_ty(&self,
1204 trait_ref: ty::TraitRef<'tcx>,
1205 item_name: ast::Name)
1208 self.normalize_associated_type(span, trait_ref, item_name)
// Inherent methods of `FnCtxt`: accessors plus the "try harder" variant of
// type-variable resolution. NOTE(review): elided listing — early returns and
// closing braces are not all visible.
1212 impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
1213 fn tcx(&self) -> &ty::ctxt<'tcx> { self.ccx.tcx }
1215 pub fn infcx(&self) -> &infer::InferCtxt<'a,'tcx> {
1219 pub fn param_env(&self) -> &ty::ParameterEnvironment<'a,'tcx> {
1220 &self.inh.infcx.parameter_environment
1223 pub fn sess(&self) -> &Session {
// Errors reported since this FnCtxt was created (used to decide whether a
// missing node type is a downstream symptom or a genuine bug).
1227 pub fn err_count_since_creation(&self) -> usize {
1228 self.ccx.tcx.sess.err_count() - self.err_count_on_creation
1231 /// Resolves type variables in `ty` if possible. Unlike the infcx
1232 /// version, this version will also select obligations if it seems
1233 /// useful, in an effort to get more type information.
1234 fn resolve_type_vars_if_possible(&self, mut ty: Ty<'tcx>) -> Ty<'tcx> {
1235 debug!("resolve_type_vars_if_possible(ty={:?})", ty);
1237 // No TyInfer()? Nothing needs doing.
1238 if !ty.has_infer_types() {
1239 debug!("resolve_type_vars_if_possible: ty={:?}", ty);
1243 // If `ty` is a type variable, see whether we already know what it is.
1244 ty = self.infcx().resolve_type_vars_if_possible(&ty);
1245 if !ty.has_infer_types() {
1246 debug!("resolve_type_vars_if_possible: ty={:?}", ty);
1250 // If not, try resolving any new fcx obligations that have cropped up.
1251 self.select_new_obligations();
1252 ty = self.infcx().resolve_type_vars_if_possible(&ty);
1253 if !ty.has_infer_types() {
1254 debug!("resolve_type_vars_if_possible: ty={:?}", ty);
1258 // If not, try resolving *all* pending obligations as much as
1259 // possible. This can help substantially when there are
1260 // indirect dependencies that don't seem worth tracking
1262 self.select_obligations_where_possible();
1263 ty = self.infcx().resolve_type_vars_if_possible(&ty);
1265 debug!("resolve_type_vars_if_possible: ty={:?}", ty);
// Queues a closure-call resolution to be retried once the closure's kind is
// known; keyed by the closure's DefId.
1269 fn record_deferred_call_resolution(&self,
1270 closure_def_id: DefId,
1271 r: DeferredCallResolutionHandler<'tcx>) {
1272 let mut deferred_call_resolutions = self.inh.deferred_call_resolutions.borrow_mut();
1273 deferred_call_resolutions.entry(closure_def_id).or_insert(vec![]).push(r);
1276 fn remove_deferred_call_resolutions(&self,
1277 closure_def_id: DefId)
1278 -> Vec<DeferredCallResolutionHandler<'tcx>>
1280 let mut deferred_call_resolutions = self.inh.deferred_call_resolutions.borrow_mut();
1281 deferred_call_resolutions.remove(&closure_def_id).unwrap_or(Vec::new())
// Debug tag identifying this FnCtxt instance (its address), used in logs.
1284 pub fn tag(&self) -> String {
1285 let self_ptr: *const FnCtxt = self;
1286 format!("{:?}", self_ptr)
// Table-writing helpers: record inferred types, substs and adjustments for
// AST nodes into the side tables in `self.inh.tables`.
1289 pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> Ty<'tcx> {
1290 match self.inh.locals.borrow().get(&nid) {
// Missing entry: report E0513 and fall back to the error type so checking
// can continue.
1293 span_err!(self.tcx().sess, span, E0513,
1294 "no type for local variable {}",
1296 self.tcx().types.err
1302 pub fn write_ty(&self, node_id: ast::NodeId, ty: Ty<'tcx>) {
1303 debug!("write_ty({}, {:?}) in fcx {}",
1304 node_id, ty, self.tag());
1305 self.inh.tables.borrow_mut().node_types.insert(node_id, ty);
// Only records substs that actually substitute something (noop substs are
// skipped — see the is_noop guard).
1308 pub fn write_substs(&self, node_id: ast::NodeId, substs: ty::ItemSubsts<'tcx>) {
1309 if !substs.substs.is_noop() {
1310 debug!("write_substs({}, {:?}) in fcx {}",
1315 self.inh.tables.borrow_mut().item_substs.insert(node_id, substs);
1319 pub fn write_autoderef_adjustment(&self,
1320 node_id: ast::NodeId,
1322 self.write_adjustment(
1324 adjustment::AdjustDerefRef(adjustment::AutoDerefRef {
1332 pub fn write_adjustment(&self,
1333 node_id: ast::NodeId,
1334 adj: adjustment::AutoAdjustment<'tcx>) {
1335 debug!("write_adjustment(node_id={}, adj={:?})", node_id, adj);
// Identity adjustments carry no information; skip storing them.
1337 if adj.is_identity() {
1341 self.inh.tables.borrow_mut().adjustments.insert(node_id, adj);
1344 /// Basically whenever we are converting from a type scheme into
1345 /// the fn body space, we always want to normalize associated
1346 /// types as well. This function combines the two.
1347 fn instantiate_type_scheme<T>(&self,
1349 substs: &Substs<'tcx>,
1352 where T : TypeFoldable<'tcx> + HasTypeFlags
1354 let value = value.subst(self.tcx(), substs);
1355 let result = self.normalize_associated_types_in(span, &value);
1356 debug!("instantiate_type_scheme(value={:?}, substs={:?}) = {:?}",
1363 /// As `instantiate_type_scheme`, but for the bounds found in a
1364 /// generic type scheme.
1365 fn instantiate_bounds(&self,
1367 substs: &Substs<'tcx>,
1368 bounds: &ty::GenericPredicates<'tcx>)
1369 -> ty::InstantiatedPredicates<'tcx>
1371 ty::InstantiatedPredicates {
1372 predicates: self.instantiate_type_scheme(span, substs, &bounds.predicates)
// Delegates to the inherited context, pinning the obligation to this body.
1377 fn normalize_associated_types_in<T>(&self, span: Span, value: &T) -> T
1378 where T : TypeFoldable<'tcx> + HasTypeFlags
1380 self.inh.normalize_associated_types_in(span, self.body_id, value)
// Normalizes a single projection `<trait_ref>::item_name` through the trait
// machinery, with a MiscObligation cause at `span`.
1383 fn normalize_associated_type(&self,
1385 trait_ref: ty::TraitRef<'tcx>,
1386 item_name: ast::Name)
1389 let cause = traits::ObligationCause::new(span,
1391 traits::ObligationCauseCode::MiscObligation);
1396 .normalize_projection_type(self.infcx(),
1398 trait_ref: trait_ref,
1399 item_name: item_name,
1404 /// Instantiates the type in `did` with the generics in `path` and returns
1405 /// it (registering the necessary trait obligations along the way).
1407 /// Note that this function is only intended to be used with type-paths,
1408 /// not with value-paths.
1409 pub fn instantiate_type(&self,
1414 debug!("instantiate_type(did={:?}, path={:?})", did, path);
1416 self.tcx().lookup_item_type(did);
1417 let type_predicates =
1418 self.tcx().lookup_predicates(did);
// Convert the explicit generic args in the final path segment into substs.
1419 let substs = astconv::ast_path_substs_for_ty(self, self,
1421 PathParamMode::Optional,
1422 &type_scheme.generics,
1423 path.segments.last().unwrap());
1424 debug!("instantiate_type: ty={:?} substs={:?}", &type_scheme.ty, &substs);
// Register the item's predicates, instantiated with these substs.
1426 self.instantiate_bounds(path.span, &substs, &type_predicates);
1427 self.add_obligations_for_parameters(
1428 traits::ObligationCause::new(
1431 traits::ItemObligation(did)),
1434 self.instantiate_type_scheme(path.span, &substs, &type_scheme.ty)
1437 /// Return the dict-like variant corresponding to a given `Def`.
1438 pub fn def_struct_variant(&self,
1441 -> Option<(ty::AdtDef<'tcx>, ty::VariantDef<'tcx>)>
1443 let (adt, variant) = match def {
1444 def::DefVariant(enum_id, variant_id, _) => {
1445 let adt = self.tcx().lookup_adt_def(enum_id);
1446 (adt, adt.variant_with_id(variant_id))
1448 def::DefTy(did, _) | def::DefStruct(did) => {
1449 let typ = self.tcx().lookup_item_type(did);
1450 if let ty::TyStruct(adt, _) = typ.ty.sty {
1451 (adt, adt.struct_variant())
// Struct-like variants are always usable with brace syntax; unit variants are
// feature-gated behind `braced_empty_structs`.
1459 let var_kind = variant.kind();
1460 if var_kind == ty::VariantKind::Struct {
1461 Some((adt, variant))
1462 } else if var_kind == ty::VariantKind::Unit {
1463 if !self.tcx().sess.features.borrow().braced_empty_structs {
1464 self.tcx().sess.span_err(span, "empty structs and enum variants \
1465 with braces are unstable");
1466 fileline_help!(self.tcx().sess, span, "add #![feature(braced_empty_structs)] to \
1467 the crate features to enable");
1470 Some((adt, variant))
// Convenience writers for the two most common "final" node types.
1476 pub fn write_nil(&self, node_id: ast::NodeId) {
1477 self.write_ty(node_id, self.tcx().mk_nil());
1479 pub fn write_error(&self, node_id: ast::NodeId) {
1480 self.write_ty(node_id, self.tcx().types.err);
// Helpers for registering builtin-bound and predicate obligations with the
// fulfillment context, plus expression-type lookup/adjustment.
1483 pub fn require_type_meets(&self,
1486 code: traits::ObligationCauseCode<'tcx>,
1487 bound: ty::BuiltinBound)
1489 self.register_builtin_bound(
1492 traits::ObligationCause::new(span, self.body_id, code));
1495 pub fn require_type_is_sized(&self,
1498 code: traits::ObligationCauseCode<'tcx>)
1500 self.require_type_meets(ty, span, code, ty::BoundSized);
1503 pub fn require_expr_have_sized_type(&self,
1505 code: traits::ObligationCauseCode<'tcx>)
1507 self.require_type_is_sized(self.expr_ty(expr), expr.span, code);
1510 pub fn type_is_known_to_be_sized(&self,
1515 traits::type_known_to_meet_builtin_bound(self.infcx(),
1521 pub fn register_builtin_bound(&self,
1523 builtin_bound: ty::BuiltinBound,
1524 cause: traits::ObligationCause<'tcx>)
1526 self.inh.infcx.fulfillment_cx.borrow_mut()
1527 .register_builtin_bound(self.infcx(), ty, builtin_bound, cause);
1530 pub fn register_predicate(&self,
1531 obligation: traits::PredicateObligation<'tcx>)
1533 debug!("register_predicate({:?})",
1535 self.inh.infcx.fulfillment_cx
1537 .register_predicate_obligation(self.infcx(), obligation);
// Converts an AST type to a `Ty` and registers a well-formedness obligation
// for the result.
1540 pub fn to_ty(&self, ast_t: &hir::Ty) -> Ty<'tcx> {
1541 let t = ast_ty_to_ty(self, self, ast_t);
1542 self.register_wf_obligation(t, ast_t.span, traits::MiscObligation);
// Looks up the recorded type of an expression; missing entries are a
// compiler bug (sess.bug).
1546 pub fn expr_ty(&self, ex: &hir::Expr) -> Ty<'tcx> {
1547 match self.inh.tables.borrow().node_types.get(&ex.id) {
1550 self.tcx().sess.bug(&format!("no type for expr in fcx {}",
1556 /// Apply `adjustment` to the type of `expr`
1557 pub fn adjust_expr_ty(&self,
1559 adjustment: Option<&adjustment::AutoAdjustment<'tcx>>)
1562 let raw_ty = self.expr_ty(expr);
1563 let raw_ty = self.infcx().shallow_resolve(raw_ty);
1564 let resolve_ty = |ty: Ty<'tcx>| self.infcx().resolve_type_vars_if_possible(&ty);
// Overloaded-deref steps get their types from the method map.
1565 raw_ty.adjust(self.tcx(), expr.span, expr.id, adjustment, |method_call| {
1566 self.inh.tables.borrow().method_map.get(&method_call)
1567 .map(|method| resolve_ty(method.ty))
// Node-type lookup plus thin wrappers around the infer module's
// subtyping/equality/region-constraint entry points.
1571 pub fn node_ty(&self, id: ast::NodeId) -> Ty<'tcx> {
1572 match self.inh.tables.borrow().node_types.get(&id) {
// If errors were already reported, a missing type is expected fallout;
// otherwise it is a compiler bug.
1574 None if self.err_count_since_creation() != 0 => self.tcx().types.err,
1576 self.tcx().sess.bug(
1577 &format!("no type for node {}: {} in fcx {}",
1578 id, self.tcx().map.node_to_string(id),
1584 pub fn item_substs(&self) -> Ref<NodeMap<ty::ItemSubsts<'tcx>>> {
1585 // NOTE: @jroesch this is a hack that appears to be fixed on nightly, will monitor if
1586 // it changes when we upgrade the snapshot compiler
1587 fn project_item_susbts<'a, 'tcx>(tables: &'a ty::Tables<'tcx>)
1588 -> &'a NodeMap<ty::ItemSubsts<'tcx>> {
1592 Ref::map(self.inh.tables.borrow(), project_item_susbts)
1595 pub fn opt_node_ty_substs<F>(&self,
1598 F: FnOnce(&ty::ItemSubsts<'tcx>),
1600 match self.inh.tables.borrow().item_substs.get(&id) {
1606 pub fn mk_subty(&self,
1607 a_is_expected: bool,
1611 -> Result<(), TypeError<'tcx>> {
1612 infer::mk_subty(self.infcx(), a_is_expected, origin, sub, sup)
1615 pub fn mk_eqty(&self,
1616 a_is_expected: bool,
1620 -> Result<(), TypeError<'tcx>> {
1621 infer::mk_eqty(self.infcx(), a_is_expected, origin, sub, sup)
1624 pub fn mk_subr(&self,
1625 origin: infer::SubregionOrigin<'tcx>,
1628 infer::mk_subr(self.infcx(), origin, sub, sup)
// Error-reporting wrappers and well-formedness/region obligation registration.
1631 pub fn type_error_message<M>(&self,
1634 actual_ty: Ty<'tcx>,
1635 err: Option<&TypeError<'tcx>>) where
1636 M: FnOnce(String) -> String,
1638 self.infcx().type_error_message(sp, mk_msg, actual_ty, err);
1641 pub fn report_mismatched_types(&self,
1645 err: &TypeError<'tcx>) {
1646 self.infcx().report_mismatched_types(sp, e, a, err)
1649 /// Registers an obligation for checking later, during regionck, that the type `ty` must
1650 /// outlive the region `r`.
1651 pub fn register_region_obligation(&self,
1654 cause: traits::ObligationCause<'tcx>)
1656 let mut fulfillment_cx = self.inh.infcx.fulfillment_cx.borrow_mut();
1657 fulfillment_cx.register_region_obligation(ty, region, cause);
1660 /// Registers an obligation that the type `ty` must be well-formed (it
1661 /// registers a `ty::Predicate::WellFormed(ty)` predicate).
1662 pub fn register_wf_obligation(&self,
1665 code: traits::ObligationCauseCode<'tcx>)
1667 // WF obligations never themselves fail, so no real need to give a detailed cause:
1668 let cause = traits::ObligationCause::new(span, self.body_id, code);
1669 self.register_predicate(traits::Obligation::new(cause, ty::Predicate::WellFormed(ty)));
1672 pub fn register_old_wf_obligation(&self,
1675 code: traits::ObligationCauseCode<'tcx>)
1677 // Registers an "old-style" WF obligation that uses the
1678 // implicator code. This is basically a buggy version of
1679 // `register_wf_obligation` that is being kept around
1680 // temporarily just to help with phasing in the newer rules.
1682 // FIXME(#27579) all uses of this should be migrated to register_wf_obligation eventually
1683 let cause = traits::ObligationCause::new(span, self.body_id, code);
// ty::ReEmpty as the region makes this effectively a pure WF check.
1684 self.register_region_obligation(ty, ty::ReEmpty, cause);
1687 /// Registers obligations that all types appearing in `substs` are well-formed.
1688 pub fn add_wf_bounds(&self, substs: &Substs<'tcx>, expr: &hir::Expr)
1690 for &ty in &substs.types {
1691 self.register_wf_obligation(ty, expr.span, traits::MiscObligation);
1695 /// Given a fully substituted set of bounds (`generic_bounds`), and the values with which each
1696 /// type/region parameter was instantiated (`substs`), creates and registers suitable
1697 /// trait/region obligations.
1699 /// For example, if there is a function:
1702 /// fn foo<'a,T:'a>(...)
1705 /// and a reference:
1711 /// Then we will create a fresh region variable `'$0` and a fresh type variable `$1` for `'a`
1712 /// and `T`. This routine will add a region obligation `$1:'$0` and register it locally.
1713 pub fn add_obligations_for_parameters(&self,
1714 cause: traits::ObligationCause<'tcx>,
1715 predicates: &ty::InstantiatedPredicates<'tcx>)
// Escaping regions here would indicate a caller bug (un-instantiated binder).
1717 assert!(!predicates.has_escaping_regions());
1719 debug!("add_obligations_for_parameters(predicates={:?})",
1722 for obligation in traits::predicates_for_generics(cause, predicates) {
1723 self.register_predicate(obligation);
1727 // FIXME(arielb1): use this instead of field.ty everywhere
// Substitutes `substs` into the field's declared type and normalizes any
// associated types in the result.
1728 pub fn field_ty(&self,
1730 field: ty::FieldDef<'tcx>,
1731 substs: &Substs<'tcx>)
1734 self.normalize_associated_types_in(span,
1735 &field.ty(self.tcx(), substs))
// NOTE(review): the two comment lines below look stale/misplaced — they talk
// about field lookup but precede `check_casts`; confirm against full source.
1738 // Only for fields! Returns <none> for methods>
1739 // Indifferent to privacy flags
// Drains and checks every cast that was deferred during the main pass
// (casts whose operand type was not yet known).
1740 fn check_casts(&self) {
1741 let mut deferred_cast_checks = self.inh.deferred_cast_checks.borrow_mut();
1742 for cast in deferred_cast_checks.drain(..) {
1747 /// Apply "fallbacks" to some types
1748 /// ! gets replaced with (), unconstrained ints with i32, and unconstrained floats with f64.
1749 fn default_type_parameters(&self) {
1750 use middle::ty::error::UnconstrainedNumeric::Neither;
1751 use middle::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1752 for ty in &self.infcx().unsolved_variables() {
1753 let resolved = self.infcx().resolve_type_vars_if_possible(ty);
// Diverging type variables fall back to `()`.
1754 if self.infcx().type_var_diverges(resolved) {
1755 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
1757 match self.infcx().type_is_unconstrained_numeric(resolved) {
1758 UnconstrainedInt => {
1759 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32)
1761 UnconstrainedFloat => {
1762 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64)
// Dispatches between the old (literal-fallback only) and new (user default
// type parameters, feature-gated) fallback algorithms.
1770 fn select_all_obligations_and_apply_defaults(&self) {
1771 if self.tcx().sess.features.borrow().default_type_parameter_fallback {
1772 self.new_select_all_obligations_and_apply_defaults();
1774 self.old_select_all_obligations_and_apply_defaults();
1778 // Implements old type inference fallback algorithm
1779 fn old_select_all_obligations_and_apply_defaults(&self) {
1780 self.select_obligations_where_possible();
1781 self.default_type_parameters();
1782 self.select_obligations_where_possible();
// New algorithm: iteratively select obligations, then apply literal fallbacks
// and user-specified defaults, detecting conflicting defaults along the way.
1785 fn new_select_all_obligations_and_apply_defaults(&self) {
1786 use middle::ty::error::UnconstrainedNumeric::Neither;
1787 use middle::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1789 // For the time being this errs on the side of being memory wasteful but provides better
1791 // let type_variables = self.infcx().type_variables.clone();
1793 // There is a possibility that this algorithm will have to run an arbitrary number of times
1794 // to terminate so we bound it by the compiler's recursion limit.
1795 for _ in 0..self.tcx().sess.recursion_limit.get() {
1796 // First we try to solve all obligations, it is possible that the last iteration
1797 // has made it possible to make more progress.
1798 self.select_obligations_where_possible();
1800 let mut conflicts = Vec::new();
1802 // Collect all unsolved type, integral and floating point variables.
1803 let unsolved_variables = self.inh.infcx.unsolved_variables();
1805 // We must collect the defaults *before* we do any unification. Because we have
1806 // directly attached defaults to the type variables any unification that occurs
1807 // will erase defaults causing conflicting defaults to be completely ignored.
1808 let default_map: FnvHashMap<_, _> =
1811 .filter_map(|t| self.infcx().default(t).map(|d| (t, d)))
1814 let mut unbound_tyvars = HashSet::new();
1816 debug!("select_all_obligations_and_apply_defaults: defaults={:?}", default_map);
1818 // We loop over the unsolved variables, resolving them and if they are
1819 // an unconstrained numeric type we add them to the set of unbound
1820 // variables. We do this so we only apply literal fallback to type
1821 // variables without defaults.
1822 for ty in &unsolved_variables {
1823 let resolved = self.infcx().resolve_type_vars_if_possible(ty);
1824 if self.infcx().type_var_diverges(resolved) {
1825 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
1827 match self.infcx().type_is_unconstrained_numeric(resolved) {
1828 UnconstrainedInt | UnconstrainedFloat => {
1829 unbound_tyvars.insert(resolved);
1836 // We now remove any numeric types that also have defaults, and instead insert
1837 // the type variable with a defined fallback.
1838 for ty in &unsolved_variables {
1839 if let Some(_default) = default_map.get(ty) {
1840 let resolved = self.infcx().resolve_type_vars_if_possible(ty);
1842 debug!("select_all_obligations_and_apply_defaults: ty: {:?} with default: {:?}",
1845 match resolved.sty {
1846 ty::TyInfer(ty::TyVar(_)) => {
1847 unbound_tyvars.insert(ty);
1850 ty::TyInfer(ty::IntVar(_)) | ty::TyInfer(ty::FloatVar(_)) => {
1851 unbound_tyvars.insert(ty);
1852 if unbound_tyvars.contains(resolved) {
1853 unbound_tyvars.remove(resolved);
1862 // If there are no more fallbacks to apply at this point we have applied all possible
1863 // defaults and type inference will proceed as normal.
1864 if unbound_tyvars.is_empty() {
1868 // Finally we go through each of the unbound type variables and unify them with
1869 // the proper fallback, reporting a conflicting default error if any of the
1870 // unifications fail. We know it must be a conflicting default because the
1871 // variable would only be in `unbound_tyvars` and have a concrete value if
1872 // it had been solved by previously applying a default.
1874 // We wrap this in a transaction for error reporting, if we detect a conflict
1875 // we will rollback the inference context to its prior state so we can probe
1876 // for conflicts and correctly report them.
1879 let _ = self.infcx().commit_if_ok(|_: &infer::CombinedSnapshot| {
1880 for ty in &unbound_tyvars {
1881 if self.infcx().type_var_diverges(ty) {
1882 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
1884 match self.infcx().type_is_unconstrained_numeric(ty) {
1885 UnconstrainedInt => {
1886 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32)
1888 UnconstrainedFloat => {
1889 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64)
1892 if let Some(default) = default_map.get(ty) {
1893 let default = default.clone();
1894 match infer::mk_eqty(self.infcx(), false,
1895 TypeOrigin::Misc(default.origin_span),
// Unification with the default failed: remember the conflict for reporting.
1899 conflicts.push((*ty, default));
1908 // If there are conflicts we rollback, otherwise commit
1909 if conflicts.len() > 0 {
1916 if conflicts.len() > 0 {
1917 // Loop through each conflicting default, figuring out the default that caused
1918 // a unification failure and then report an error for each.
1919 for (conflict, default) in conflicts {
1920 let conflicting_default =
1921 self.find_conflicting_default(&unbound_tyvars, &default_map, conflict)
1922 .unwrap_or(type_variable::Default {
1923 ty: self.infcx().next_ty_var(),
1924 origin_span: codemap::DUMMY_SP,
1925 def_id: self.tcx().map.local_def_id(0) // what do I put here?
1928 // This is to ensure that we eliminate any non-determinism from the error
1929 // reporting by fixing an order, it doesn't matter what order we choose
1930 // just that it is consistent.
1931 let (first_default, second_default) =
1932 if default.def_id < conflicting_default.def_id {
1933 (default, conflicting_default)
1935 (conflicting_default, default)
1939 self.infcx().report_conflicting_default_types(
1940 first_default.origin_span,
1947 self.select_obligations_where_possible();
1950 // For use in error handling related to default type parameter fallback. We explicitly
1951 // apply the default that caused conflict first to a local version of the type variable
1952 // table then apply defaults until we find a conflict. That default must be the one
1953 // that caused conflict earlier.
1954 fn find_conflicting_default(&self,
1955 unbound_vars: &HashSet<Ty<'tcx>>,
1956 default_map: &FnvHashMap<&Ty<'tcx>, type_variable::Default<'tcx>>,
1958 -> Option<type_variable::Default<'tcx>> {
1959 use middle::ty::error::UnconstrainedNumeric::Neither;
1960 use middle::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1962 // Ensure that we apply the conflicting default first
1963 let mut unbound_tyvars = Vec::with_capacity(unbound_vars.len() + 1);
1964 unbound_tyvars.push(conflict);
1965 unbound_tyvars.extend(unbound_vars.iter());
1967 let mut result = None;
1968 // We run the same code as above applying defaults in order, this time when
1969 // we find the conflict we just return it for error reporting above.
1971 // We also run this inside a snapshot that never commits so we can do error
1972 // reporting for more than one conflict.
1973 for ty in &unbound_tyvars {
// Same fallback order as new_select_all_obligations_and_apply_defaults:
// diverging -> (), unconstrained int -> i32, unconstrained float -> f64,
// then user-provided defaults.
1974 if self.infcx().type_var_diverges(ty) {
1975 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
1977 match self.infcx().type_is_unconstrained_numeric(ty) {
1978 UnconstrainedInt => {
1979 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32)
1981 UnconstrainedFloat => {
1982 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64)
1985 if let Some(default) = default_map.get(ty) {
1986 let default = default.clone();
1987 match infer::mk_eqty(self.infcx(), false,
1988 TypeOrigin::Misc(default.origin_span),
1992 result = Some(default);
// Final obligation selection: apply defaults, then require every remaining
// obligation to be satisfiable, reporting fulfillment errors otherwise.
2004 fn select_all_obligations_or_error(&self) {
2005 debug!("select_all_obligations_or_error");
2007 // upvar inference should have ensured that all deferred call
2008 // resolutions are handled by now.
2009 assert!(self.inh.deferred_call_resolutions.borrow().is_empty());
2011 self.select_all_obligations_and_apply_defaults();
2013 let mut fulfillment_cx = self.inh.infcx.fulfillment_cx.borrow_mut();
2014 match fulfillment_cx.select_all_or_error(self.infcx()) {
2016 Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }
2020 /// Select as many obligations as we can at present.
2021 fn select_obligations_where_possible(&self) {
2023 self.inh.infcx.fulfillment_cx
2025 .select_where_possible(self.infcx())
2028 Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }
2032 /// Try to select any fcx obligation that we haven't tried yet, in an effort
2033 /// to improve inference. You could just call
2034 /// `select_obligations_where_possible` except that it leads to repeated
2036 fn select_new_obligations(&self) {
2038 self.inh.infcx.fulfillment_cx
2040 .select_new_obligations(self.infcx())
2043 Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }
// Inside a function body, every elided or defaulted region becomes a fresh
// inference variable rather than a fixed default — inference will find the
// correct region during regionck.
2049 impl<'a, 'tcx> RegionScope for FnCtxt<'a, 'tcx> {
2050 fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
2051 Some(self.base_object_lifetime_default(span))
2054 fn base_object_lifetime_default(&self, span: Span) -> ty::Region {
2055 // RFC #599 specifies that object lifetime defaults take
2056 // precedence over other defaults. But within a fn body we
2057 // don't have a *default* region, rather we use inference to
2058 // find the *correct* region, which is strictly more general
2059 // (and anyway, within a fn body the right region may not even
2060 // be something the user can write explicitly, since it might
2061 // be some expression).
2062 self.infcx().next_region_var(infer::MiscVariable(span))
// Each elided region in a signature position gets its own region variable.
2065 fn anon_regions(&self, span: Span, count: usize)
2066 -> Result<Vec<ty::Region>, Option<Vec<ElisionFailureInfo>>> {
2067 Ok((0..count).map(|_| {
2068 self.infcx().next_region_var(infer::MiscVariable(span))
2073 /// Whether `autoderef` requires types to resolve.
2074 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
2075 pub enum UnresolvedTypeAction {
// NOTE(review): the variant identifiers themselves are elided from this
// listing; only their doc comments are visible (`autoderef` below matches on
// `UnresolvedTypeAction::Error` and `UnresolvedTypeAction::Ignore`).
2076 /// Produce an error and return `TyError` whenever a type cannot
2077 /// be resolved (i.e. it is `TyInfer`).
2079 /// Go on without emitting any errors, and return the unresolved
2080 /// type. Useful for probing, e.g. in coercions.
2084 /// Executes an autoderef loop for the type `t`. At each step, invokes `should_stop` to decide
2085 /// whether to terminate the loop. Returns the final type and number of derefs that it performed.
2087 /// Note: this method does not modify the adjustments table. The caller is responsible for
2088 /// inserting an AutoAdjustment record into the `fcx` using one of the suitable methods.
2089 pub fn autoderef<'a, 'tcx, T, F>(fcx: &FnCtxt<'a, 'tcx>,
2092 opt_expr: Option<&hir::Expr>,
2093 unresolved_type_action: UnresolvedTypeAction,
2094 mut lvalue_pref: LvaluePreference,
2096 -> (Ty<'tcx>, usize, Option<T>)
2097 where F: FnMut(Ty<'tcx>, usize) -> Option<T>,
2099 debug!("autoderef(base_ty={:?}, opt_expr={:?}, lvalue_pref={:?})",
2104 let mut t = base_ty;
// Bounded by the recursion limit so pathological Deref chains terminate.
2105 for autoderefs in 0..fcx.tcx().sess.recursion_limit.get() {
2106 let resolved_t = match unresolved_type_action {
2107 UnresolvedTypeAction::Error => {
2108 structurally_resolved_type(fcx, sp, t)
2110 UnresolvedTypeAction::Ignore => {
2111 // We can continue even when the type cannot be resolved
2112 // (i.e. it is an inference variable) because `Ty::builtin_deref`
2113 // and `try_overloaded_deref` both simply return `None`
2114 // in such a case without producing spurious errors.
2115 fcx.resolve_type_vars_if_possible(t)
2118 if resolved_t.references_error() {
2119 return (resolved_t, autoderefs, None);
// Let the caller decide whether this level is the one it wants.
2122 match should_stop(resolved_t, autoderefs) {
2123 Some(x) => return (resolved_t, autoderefs, Some(x)),
2127 // Otherwise, deref if type is derefable:
2128 let mt = match resolved_t.builtin_deref(false, lvalue_pref) {
2129 Some(mt) => Some(mt),
2132 opt_expr.map(|expr| MethodCall::autoderef(expr.id, autoderefs as u32));
2134 // Super subtle: it might seem as though we should
2135 // pass `opt_expr` to `try_overloaded_deref`, so that
2136 // the (implicit) autoref of using an overloaded deref
2137 // would get added to the adjustment table. However we
2138 // do not do that, because it's kind of a
2139 // "meta-adjustment" -- instead, we just leave it
2140 // unrecorded and know that there "will be" an
2141 // autoref. regionck and other bits of the code base,
2142 // when they encounter an overloaded autoderef, have
2143 // to do some reconstructive surgery. This is a pretty
2144 // complex mess that is begging for a proper MIR.
2145 try_overloaded_deref(fcx, sp, method_call, None, resolved_t, lvalue_pref)
// Once we pass through an immutable reference, a mutable lvalue is no
// longer reachable, so drop the preference.
2151 if mt.mutbl == hir::MutImmutable {
2152 lvalue_pref = NoPreference;
2155 None => return (resolved_t, autoderefs, None)
2159 // We've reached the recursion limit, error gracefully.
2160 span_err!(fcx.tcx().sess, sp, E0055,
2161 "reached the recursion limit while auto-dereferencing {:?}",
2163 (fcx.tcx().types.err, 0, None)
// Attempts an overloaded deref of `resolved_t`: tries the `DerefMut` lang
// item first when a mutable lvalue is preferred, then falls back to `Deref`.
// On success the callee is recorded (via make_overloaded_lvalue_return_type)
// and the pointee `TypeAndMut` is returned.
2166 fn try_overloaded_deref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2168 method_call: Option<MethodCall>,
2169 base_expr: Option<&hir::Expr>,
2171 lvalue_pref: LvaluePreference)
2172 -> Option<ty::TypeAndMut<'tcx>>
2174 // Try DerefMut first, if preferred.
2175 let method = match (lvalue_pref, fcx.tcx().lang_items.deref_mut_trait()) {
2176 (PreferMutLvalue, Some(trait_did)) => {
2177 method::lookup_in_trait(fcx, span, base_expr,
2178 token::intern("deref_mut"), trait_did,
2184 // Otherwise, fall back to Deref.
2185 let method = match (method, fcx.tcx().lang_items.deref_trait()) {
2186 (None, Some(trait_did)) => {
2187 method::lookup_in_trait(fcx, span, base_expr,
2188 token::intern("deref"), trait_did,
2191 (method, _) => method
2194 make_overloaded_lvalue_return_type(fcx, method_call, method)
2197 /// For the overloaded lvalue expressions (`*x`, `x[3]`), the trait returns a type of `&T`, but the
2198 /// actual type we assign to the *expression* is `T`. So this function just peels off the return
2199 /// type by one layer to yield `T`. It also inserts the `method-callee` into the method map.
2200 fn make_overloaded_lvalue_return_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2201 method_call: Option<MethodCall>,
2202 method: Option<MethodCallee<'tcx>>)
2203 -> Option<ty::TypeAndMut<'tcx>>
2207 // extract method return type, which will be &T;
2208 // all LB regions should have been instantiated during method lookup
2209 let ret_ty = method.ty.fn_ret();
// First unwrap: no late-bound regions expected; second: the fn is not `-> !`.
2210 let ret_ty = fcx.tcx().no_late_bound_regions(&ret_ty).unwrap().unwrap();
// Only record the callee when the caller supplied a method-map key.
2212 if let Some(method_call) = method_call {
2213 fcx.inh.tables.borrow_mut().method_map.insert(method_call, method);
2216 // method returns &T, but the type as visible to user is T, so deref
2217 ret_ty.builtin_deref(true, NoPreference)
// Type-checks `base_expr[idx]`: autoderefs the base (erroring on unresolved
// types), trying an index step at each level; after full autoderef, also
// tries the `[T; n] -> [T]` unsized coercion. Returns (index type, element
// type) on success.
2223 fn lookup_indexing<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2225 base_expr: &'tcx hir::Expr,
2228 lvalue_pref: LvaluePreference)
2229 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2231 // FIXME(#18741) -- this is almost but not quite the same as the
2232 // autoderef that normal method probing does. They could likely be
2235 let (ty, autoderefs, final_mt) = autoderef(fcx,
2239 UnresolvedTypeAction::Error,
// At each autoderef level, try one (non-unsizing) index step.
2242 try_index_step(fcx, MethodCall::expr(expr.id), expr, base_expr,
2243 adj_ty, idx, false, lvalue_pref, idx_ty)
2246 if final_mt.is_some() {
2250 // After we have fully autoderef'd, if the resulting type is [T; n], then
2251 // do a final unsized coercion to yield [T].
2252 if let ty::TyArray(element_ty, _) = ty.sty {
2253 let adjusted_ty = fcx.tcx().mk_slice(element_ty);
2254 try_index_step(fcx, MethodCall::expr(expr.id), expr, base_expr,
2255 adjusted_ty, autoderefs, true, lvalue_pref, idx_ty)
2261 /// To type-check `base_expr[index_expr]`, we progressively autoderef (and otherwise adjust)
2262 /// `base_expr`, looking for a type which either supports builtin indexing or overloaded indexing.
2263 /// This loop implements one step in that search; the autoderef loop is implemented by
2264 /// `lookup_indexing`.
// NOTE(review): interior lines are missing from this excerpt; lookup order as
// visible: built-in indexing, then `IndexMut` (if a mutable lvalue is
// preferred), then `Index`.
2265 fn try_index_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2266 method_call: MethodCall,
2268 base_expr: &'tcx hir::Expr,
2269 adjusted_ty: Ty<'tcx>,
2272 lvalue_pref: LvaluePreference,
2274 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2276 let tcx = fcx.tcx();
2277 debug!("try_index_step(expr={:?}, base_expr.id={:?}, adjusted_ty={:?}, \
2278 autoderefs={}, unsize={}, index_ty={:?})",
// Fresh inference variable standing in for the index operand's type.
2286 let input_ty = fcx.infcx().next_ty_var();
2288 // First, try built-in indexing.
2289 match (adjusted_ty.builtin_index(), &index_ty.sty) {
2290 (Some(ty), &ty::TyUint(ast::TyUs)) | (Some(ty), &ty::TyInfer(ty::IntVar(_))) => {
2291 debug!("try_index_step: success, using built-in indexing");
2292 // If we had `[T; N]`, we should've caught it before unsizing to `[T]`.
2294 fcx.write_autoderef_adjustment(base_expr.id, autoderefs);
// Built-in indexing always indexes by `usize`.
2295 return Some((tcx.types.usize, ty));
2300 // Try `IndexMut` first, if preferred.
2301 let method = match (lvalue_pref, tcx.lang_items.index_mut_trait()) {
2302 (PreferMutLvalue, Some(trait_did)) => {
2303 method::lookup_in_trait_adjusted(fcx,
2306 token::intern("index_mut"),
2311 Some(vec![input_ty]))
2316 // Otherwise, fall back to `Index`.
2317 let method = match (method, tcx.lang_items.index_trait()) {
2318 (None, Some(trait_did)) => {
2319 method::lookup_in_trait_adjusted(fcx,
2322 token::intern("index"),
2327 Some(vec![input_ty]))
2329 (method, _) => method,
2332 // If some lookup succeeds, write callee into table and extract index/element
2333 // type from the method signature.
2334 // If some lookup succeeded, install method in table
2335 method.and_then(|method| {
2336 debug!("try_index_step: success, using overloaded indexing");
2337 make_overloaded_lvalue_return_type(fcx, Some(method_call), Some(method)).
2338 map(|ret| (input_ty, ret.ty))
// Checks the (non-receiver) arguments of a resolved method call and returns
// the call's output type. On a method type that already references an error,
// it checks the arguments against error placeholders and returns `err`.
// (Excerpt: some interior lines are missing.)
2342 fn check_method_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2344 method_fn_ty: Ty<'tcx>,
2345 callee_expr: &'tcx hir::Expr,
2346 args_no_rcvr: &'tcx [P<hir::Expr>],
2347 tuple_arguments: TupleArgumentsFlag,
2348 expected: Expectation<'tcx>)
2349 -> ty::FnOutput<'tcx> {
2350 if method_fn_ty.references_error() {
// Error recovery: still check each argument, against `err` placeholders.
2351 let err_inputs = err_args(fcx.tcx(), args_no_rcvr.len());
2353 let err_inputs = match tuple_arguments {
2354 DontTupleArguments => err_inputs,
2355 TupleArguments => vec![fcx.tcx().mk_tup(err_inputs)],
2358 check_argument_types(fcx,
2365 ty::FnConverging(fcx.tcx().types.err)
2367 match method_fn_ty.sty {
2368 ty::TyBareFn(_, ref fty) => {
2369 // HACK(eddyb) ignore self in the definition (see above).
// `inputs[1..]` skips the receiver; only the explicit args are checked here.
2370 let expected_arg_tys = expected_types_for_fn_args(fcx,
2374 &fty.sig.0.inputs[1..]);
2375 check_argument_types(fcx,
2377 &fty.sig.0.inputs[1..],
2378 &expected_arg_tys[..],
2385 fcx.tcx().sess.span_bug(callee_expr.span,
2386 "method without bare fn type");
2392 /// Generic function that factors out common logic from function calls, method calls and overloaded
// NOTE(review): this excerpt omits many interior lines (arms, closing braces,
// some arguments), so the body below is not contiguous in the original file.
2394 fn check_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2396 fn_inputs: &[Ty<'tcx>],
2397 expected_arg_tys: &[Ty<'tcx>],
2398 args: &'tcx [P<hir::Expr>],
2400 tuple_arguments: TupleArgumentsFlag) {
2401 let tcx = fcx.ccx.tcx;
2403 // Grab the argument types, supplying fresh type variables
2404 // if the wrong number of arguments were supplied
2405 let supplied_arg_count = if tuple_arguments == DontTupleArguments {
2411 // All the input types from the fn signature must outlive the call
2412 // so as to validate implied bounds.
2413 for &fn_input_ty in fn_inputs {
2414 fcx.register_wf_obligation(fn_input_ty, sp, traits::MiscObligation);
2417 let mut expected_arg_tys = expected_arg_tys;
2418 let expected_arg_count = fn_inputs.len();
// Arity checking: each failure arm resets `expected_arg_tys` and substitutes
// error placeholder types so checking can continue.
2419 let formal_tys = if tuple_arguments == TupleArguments {
2420 let tuple_type = structurally_resolved_type(fcx, sp, fn_inputs[0]);
2421 match tuple_type.sty {
2422 ty::TyTuple(ref arg_types) => {
2423 if arg_types.len() != args.len() {
2424 span_err!(tcx.sess, sp, E0057,
2425 "this function takes {} parameter{} but {} parameter{} supplied",
2427 if arg_types.len() == 1 {""} else {"s"},
2429 if args.len() == 1 {" was"} else {"s were"});
2430 expected_arg_tys = &[];
2431 err_args(fcx.tcx(), args.len())
2433 expected_arg_tys = match expected_arg_tys.get(0) {
2434 Some(&ty) => match ty.sty {
2435 ty::TyTuple(ref tys) => &**tys,
2440 (*arg_types).clone()
2444 span_err!(tcx.sess, sp, E0059,
2445 "cannot use call notation; the first type parameter \
2446 for the function trait is neither a tuple nor unit");
2447 expected_arg_tys = &[];
2448 err_args(fcx.tcx(), args.len())
2451 } else if expected_arg_count == supplied_arg_count {
2453 } else if variadic {
2454 if supplied_arg_count >= expected_arg_count {
2457 span_err!(tcx.sess, sp, E0060,
2458 "this function takes at least {} parameter{} \
2459 but {} parameter{} supplied",
2461 if expected_arg_count == 1 {""} else {"s"},
2463 if supplied_arg_count == 1 {" was"} else {"s were"});
2464 expected_arg_tys = &[];
2465 err_args(fcx.tcx(), supplied_arg_count)
2468 span_err!(tcx.sess, sp, E0061,
2469 "this function takes {} parameter{} but {} parameter{} supplied",
2471 if expected_arg_count == 1 {""} else {"s"},
2473 if supplied_arg_count == 1 {" was"} else {"s were"});
2474 expected_arg_tys = &[];
2475 err_args(fcx.tcx(), supplied_arg_count)
2478 debug!("check_argument_types: formal_tys={:?}",
2479 formal_tys.iter().map(|t| fcx.infcx().ty_to_string(*t)).collect::<Vec<String>>());
2481 // Check the arguments.
2482 // We do this in a pretty awful way: first we typecheck any arguments
2483 // that are not anonymous functions, then we typecheck the anonymous
2484 // functions. This is so that we have more information about the types
2485 // of arguments when we typecheck the functions. This isn't really the
2486 // right way to do this.
// Two passes: pass 0 handles non-closure args, pass 1 handles closures.
2487 let xs = [false, true];
2488 let mut any_diverges = false; // has any of the arguments diverged?
2489 let mut warned = false; // have we already warned about unreachable code?
2490 for check_blocks in &xs {
2491 let check_blocks = *check_blocks;
2492 debug!("check_blocks={}", check_blocks);
2494 // More awful hacks: before we check argument types, try to do
2495 // an "opportunistic" vtable resolution of any trait bounds on
2496 // the call. This helps coercions.
2498 fcx.select_new_obligations();
2501 // For variadic functions, we don't have a declared type for all of
2502 // the arguments hence we only do our usual type checking with
2503 // the arguments who's types we do know.
2504 let t = if variadic {
2506 } else if tuple_arguments == TupleArguments {
2511 for (i, arg) in args.iter().take(t).enumerate() {
2512 if any_diverges && !warned {
2516 .add_lint(lint::builtin::UNREACHABLE_CODE,
2519 "unreachable expression".to_string());
2522 let is_block = match arg.node {
2523 hir::ExprClosure(..) => true,
2527 if is_block == check_blocks {
2528 debug!("checking the argument");
2529 let formal_ty = formal_tys[i];
2531 // The special-cased logic below has three functions:
2532 // 1. Provide as good of an expected type as possible.
2533 let expected = expected_arg_tys.get(i).map(|&ty| {
2534 Expectation::rvalue_hint(fcx.tcx(), ty)
2537 check_expr_with_unifier(fcx,
2539 expected.unwrap_or(ExpectHasType(formal_ty)),
2541 // 2. Coerce to the most detailed type that could be coerced
2542 // to, which is `expected_ty` if `rvalue_hint` returns an
2543 // `ExprHasType(expected_ty)`, or the `formal_ty` otherwise.
2544 let coerce_ty = expected.and_then(|e| e.only_has_type(fcx));
2545 demand::coerce(fcx, arg.span, coerce_ty.unwrap_or(formal_ty), &**arg);
2547 // 3. Relate the expected type and the formal one,
2548 // if the expected type was used for the coercion.
2549 coerce_ty.map(|ty| demand::suptype(fcx, arg.span, formal_ty, ty));
2553 if let Some(&arg_ty) = fcx.inh.tables.borrow().node_types.get(&arg.id) {
2554 any_diverges = any_diverges || fcx.infcx().type_var_diverges(arg_ty);
2557 if any_diverges && !warned {
2558 let parent = fcx.ccx.tcx.map.get_parent_node(args[0].id);
2562 .add_lint(lint::builtin::UNREACHABLE_CODE,
2565 "unreachable call".to_string());
2571 // We also need to make sure we at least write the ty of the other
2572 // arguments which we skipped above.
// Variadic tail: type-check the extra C-varargs arguments and reject the
// types that C would auto-promote, demanding an explicit cast instead.
2574 for arg in args.iter().skip(expected_arg_count) {
2575 check_expr(fcx, &**arg);
2577 // There are a few types which get autopromoted when passed via varargs
2578 // in C but we just error out instead and require explicit casts.
2579 let arg_ty = structurally_resolved_type(fcx, arg.span,
2580 fcx.expr_ty(&**arg));
2582 ty::TyFloat(ast::TyF32) => {
2583 fcx.type_error_message(arg.span,
2585 format!("can't pass an {} to variadic \
2586 function, cast to c_double", t)
2589 ty::TyInt(ast::TyI8) | ty::TyInt(ast::TyI16) | ty::TyBool => {
2590 fcx.type_error_message(arg.span, |t| {
2591 format!("can't pass {} to variadic \
2592 function, cast to c_int",
2596 ty::TyUint(ast::TyU8) | ty::TyUint(ast::TyU16) => {
2597 fcx.type_error_message(arg.span, |t| {
2598 format!("can't pass {} to variadic \
2599 function, cast to c_uint",
2609 // FIXME(#17596) Ty<'tcx> is incorrectly invariant w.r.t 'tcx.
// Produces `len` copies of the error type, used as placeholder formal
// argument types after an arity or resolution error so checking can continue.
2610 fn err_args<'tcx>(tcx: &ty::ctxt<'tcx>, len: usize) -> Vec<Ty<'tcx>> {
2611 (0..len).map(|_| tcx.types.err).collect()
// Records the type of a call expression: the output type when the callee
// converges, or a fresh diverging type variable when it diverges.
2614 fn write_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2615 call_expr: &hir::Expr,
2616 output: ty::FnOutput<'tcx>) {
2617 fcx.write_ty(call_expr.id, match output {
2618 ty::FnConverging(output_ty) => output_ty,
2619 ty::FnDiverging => fcx.infcx().next_diverging_ty_var()
2623 // AST fragment checking
// Computes the type of a literal. Unsuffixed integer/float literals consult
// the expectation to pick a concrete type where sensible, otherwise fall back
// to a fresh int/float inference variable. (Excerpt: some lines missing.)
2624 fn check_lit<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2626 expected: Expectation<'tcx>)
2629 let tcx = fcx.ccx.tcx;
2632 ast::LitStr(..) => tcx.mk_static_str(),
// Byte-string literal: `&'static [u8; len]`.
2633 ast::LitByteStr(ref v) => {
2634 tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic),
2635 tcx.mk_array(tcx.types.u8, v.len()))
2637 ast::LitByte(_) => tcx.types.u8,
2638 ast::LitChar(_) => tcx.types.char,
2639 ast::LitInt(_, ast::SignedIntLit(t, _)) => tcx.mk_mach_int(t),
2640 ast::LitInt(_, ast::UnsignedIntLit(t)) => tcx.mk_mach_uint(t),
2641 ast::LitInt(_, ast::UnsuffixedIntLit(_)) => {
2642 let opt_ty = expected.to_option(fcx).and_then(|ty| {
2644 ty::TyInt(_) | ty::TyUint(_) => Some(ty),
// When a char/pointer/fn type is expected, pick the integer type that
// the literal is presumably being cast to (u8/usize respectively).
2645 ty::TyChar => Some(tcx.types.u8),
2646 ty::TyRawPtr(..) => Some(tcx.types.usize),
2647 ty::TyBareFn(..) => Some(tcx.types.usize),
2651 opt_ty.unwrap_or_else(
2652 || tcx.mk_int_var(fcx.infcx().next_int_var_id()))
2654 ast::LitFloat(_, t) => tcx.mk_mach_float(t),
2655 ast::LitFloatUnsuffixed(_) => {
2656 let opt_ty = expected.to_option(fcx).and_then(|ty| {
2658 ty::TyFloat(_) => Some(ty),
2662 opt_ty.unwrap_or_else(
2663 || tcx.mk_float_var(fcx.infcx().next_float_var_id()))
2665 ast::LitBool(_) => tcx.types.bool
// Checks `expr` against an exact expected type: after checking, demands that
// the expression's type is a subtype of `expected` (no coercion).
2669 pub fn check_expr_has_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2670 expr: &'tcx hir::Expr,
2671 expected: Ty<'tcx>) {
2672 check_expr_with_unifier(
2673 fcx, expr, ExpectHasType(expected), NoPreference,
2674 || demand::suptype(fcx, expr.span, expected, fcx.expr_ty(expr)));
// Like `check_expr_has_type`, but allows the expression to be *coerced* to
// the expected type rather than requiring an exact subtype relationship.
2677 fn check_expr_coercable_to_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2678 expr: &'tcx hir::Expr,
2679 expected: Ty<'tcx>) {
2680 check_expr_with_unifier(
2681 fcx, expr, ExpectHasType(expected), NoPreference,
2682 || demand::coerce(fcx, expr.span, expected, expr));
// Checks `expr` with `expected` as a hint only — no demand is imposed
// afterwards (the unifier closure is omitted from this excerpt).
2685 fn check_expr_with_hint<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, expr: &'tcx hir::Expr,
2686 expected: Ty<'tcx>) {
2687 check_expr_with_unifier(
2688 fcx, expr, ExpectHasType(expected), NoPreference,
// Checks `expr` under an arbitrary `Expectation`, with no lvalue preference
// and no post-check demand (the trailing closure is omitted in this excerpt).
2692 fn check_expr_with_expectation<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2693 expr: &'tcx hir::Expr,
2694 expected: Expectation<'tcx>) {
2695 check_expr_with_unifier(
2696 fcx, expr, expected, NoPreference,
// Checks `expr` under both an expectation and an lvalue preference
// (e.g. preferring a mutable lvalue for assignment targets).
2700 fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2701 expr: &'tcx hir::Expr,
2702 expected: Expectation<'tcx>,
2703 lvalue_pref: LvaluePreference)
2705 check_expr_with_unifier(fcx, expr, expected, lvalue_pref, || ())
// Checks `expr` with no expectation and no lvalue preference.
2708 fn check_expr<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, expr: &'tcx hir::Expr) {
2709 check_expr_with_unifier(fcx, expr, NoExpectation, NoPreference, || ())
// Checks `expr` with no expectation but with an explicit lvalue preference.
2712 fn check_expr_with_lvalue_pref<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, expr: &'tcx hir::Expr,
2713 lvalue_pref: LvaluePreference) {
2714 check_expr_with_unifier(fcx, expr, NoExpectation, lvalue_pref, || ())
2717 // determine the `self` type, using fresh variables for all variables
2718 // declared on the impl declaration e.g., `impl<A,B> for Vec<(A,B)>`
2719 // would return ($0, $1) where $0 and $1 are freshly instantiated type
// (Excerpt: some interior lines, including the `did` parameter's declaration,
// are missing from this view.)
2721 pub fn impl_self_ty<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2722 span: Span, // (potential) receiver for this impl
2724 -> TypeAndSubsts<'tcx> {
2725 let tcx = fcx.tcx();
2727 let ity = tcx.lookup_item_type(did);
// Pull the impl's type/region parameters (TypeSpace) and its raw self type.
2728 let (tps, rps, raw_ty) =
2729 (ity.generics.types.get_slice(subst::TypeSpace),
2730 ity.generics.regions.get_slice(subst::TypeSpace),
2733 debug!("impl_self_ty: tps={:?} rps={:?} raw_ty={:?}", tps, rps, raw_ty);
// Fresh region variables for the impl's region parameters...
2735 let rps = fcx.inh.infcx.region_vars_for_defs(span, rps);
2736 let mut substs = subst::Substs::new(
2737 VecPerParamSpace::empty(),
2738 VecPerParamSpace::new(rps, Vec::new(), Vec::new()));
// ...and fresh type variables for its type parameters, filled into `substs`.
2739 fcx.inh.infcx.type_vars_for_defs(span, ParamSpace::TypeSpace, &mut substs, tps);
2740 let substd_ty = fcx.instantiate_type_scheme(span, &substs, &raw_ty);
2742 TypeAndSubsts { substs: substs, ty: substd_ty }
2745 /// Controls whether the arguments are tupled. This is used for the call
2748 /// Tupling means that all call-side arguments are packed into a tuple and
2749 /// passed as a single parameter. For example, if tupling is enabled, this
2752 /// fn f(x: (isize, isize))
2754 /// Can be called as:
// (Excerpt: the enum's variants — `DontTupleArguments` and `TupleArguments`,
// per their uses elsewhere in this file — are omitted from this view.)
2761 #[derive(Clone, Eq, PartialEq)]
2762 enum TupleArgumentsFlag {
2767 /// Unifies the return type with the expected type early, for more coercions
2768 /// and forward type information on the argument expressions.
// Returns the expected types for each formal argument, or an empty vec when
// no useful expectation could be derived. (Excerpt: some lines missing.)
2769 fn expected_types_for_fn_args<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2771 expected_ret: Expectation<'tcx>,
2772 formal_ret: ty::FnOutput<'tcx>,
2773 formal_args: &[Ty<'tcx>])
2775 let expected_args = expected_ret.only_has_type(fcx).and_then(|ret_ty| {
2776 if let ty::FnConverging(formal_ret_ty) = formal_ret {
// Region constraints are committed only if the subtyping attempt succeeds.
2777 fcx.infcx().commit_regions_if_ok(|| {
2778 // Attempt to apply a subtyping relationship between the formal
2779 // return type (likely containing type variables if the function
2780 // is polymorphic) and the expected return type.
2781 // No argument expectations are produced if unification fails.
2782 let origin = TypeOrigin::Misc(call_span);
2783 let ures = fcx.infcx().sub_types(false, origin, formal_ret_ty, ret_ty);
2784 // FIXME(#15760) can't use try! here, FromError doesn't default
2785 // to identity so the resulting type is not constrained.
2786 if let Err(e) = ures {
2790 // Record all the argument types, with the substitutions
2791 // produced from the above subtyping unification.
2792 Ok(formal_args.iter().map(|ty| {
2793 fcx.infcx().resolve_type_vars_if_possible(ty)
2799 }).unwrap_or(vec![]);
2800 debug!("expected_types_for_fn_args(formal={:?} -> {:?}, expected={:?} -> {:?})",
2801 formal_args, formal_ret,
2802 expected_args, expected_ret);
2807 /// If an expression has any sub-expressions that result in a type error,
2808 /// inspecting that expression's type with `ty.references_error()` will return
2809 /// true. Likewise, if an expression is known to diverge, inspecting its
2810 /// type with `ty::type_is_bot` will return true (n.b.: since Rust is
2811 /// strict, _|_ can appear in the type of an expression that does not,
2812 /// itself, diverge: for example, fn() -> _|_.)
2813 /// Note that inspecting a type's structure *directly* may expose the fact
2814 /// that there are actually multiple representations for `TyError`, so avoid
2815 /// that when err needs to be handled differently.
2816 fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
2817 expr: &'tcx hir::Expr,
2818 expected: Expectation<'tcx>,
2819 lvalue_pref: LvaluePreference,
2823 debug!(">> typechecking: expr={:?} expected={:?}",
2826 // Checks a method call.
// Checks the receiver, resolves the method by name, records the callee in the
// method map, then checks the remaining arguments and writes the call's type.
// (Excerpt: some interior lines are missing.)
2827 fn check_method_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2828 expr: &'tcx hir::Expr,
2829 method_name: Spanned<ast::Name>,
2830 args: &'tcx [P<hir::Expr>],
2832 expected: Expectation<'tcx>,
2833 lvalue_pref: LvaluePreference) {
// By convention, `args[0]` is the receiver of the method call.
2834 let rcvr = &*args[0];
2835 check_expr_with_lvalue_pref(fcx, &*rcvr, lvalue_pref);
2837 // no need to check for bot/err -- callee does that
2838 let expr_t = structurally_resolved_type(fcx,
2840 fcx.expr_ty(&*rcvr));
// Convert the explicit type arguments (`x.foo::<T>(..)`) to `Ty`s.
2842 let tps = tps.iter().map(|ast_ty| fcx.to_ty(&**ast_ty)).collect::<Vec<_>>();
2843 let fn_ty = match method::lookup(fcx,
2851 let method_ty = method.ty;
2852 let method_call = MethodCall::expr(expr.id);
2853 fcx.inh.tables.borrow_mut().method_map.insert(method_call, method);
// Lookup failure: report the error and record `err` for this expression.
2857 method::report_error(fcx, method_name.span, expr_t,
2858 method_name.node, Some(rcvr), error);
2859 fcx.write_error(expr.id);
2864 // Call the generic checker.
2865 let ret_ty = check_method_argument_types(fcx,
2873 write_call(fcx, expr, ret_ty);
2876 // A generic function for checking the then and else in an if
// Checks the condition against `bool`, checks both branches, and computes the
// `if` expression's type as a common supertype of the branches (using a
// no-else origin when there is no else branch). (Excerpt: lines missing.)
2878 fn check_then_else<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2879 cond_expr: &'tcx hir::Expr,
2880 then_blk: &'tcx hir::Block,
2881 opt_else_expr: Option<&'tcx hir::Expr>,
2884 expected: Expectation<'tcx>) {
2885 check_expr_has_type(fcx, cond_expr, fcx.tcx().types.bool);
// Only expectations that make sense across branches are kept.
2887 let expected = expected.adjust_for_branches(fcx);
2888 check_block_with_expected(fcx, then_blk, expected);
2889 let then_ty = fcx.node_ty(then_blk.id);
2891 let branches_ty = match opt_else_expr {
2892 Some(ref else_expr) => {
2893 check_expr_with_expectation(fcx, &**else_expr, expected);
2894 let else_ty = fcx.expr_ty(&**else_expr);
2895 infer::common_supertype(fcx.infcx(),
2896 TypeOrigin::IfExpression(sp),
2902 infer::common_supertype(fcx.infcx(),
2903 TypeOrigin::IfExpressionWithNoElse(sp),
// If the condition was erroneous, the whole `if` is typed as `err`.
2910 let cond_ty = fcx.expr_ty(cond_expr);
2911 let if_ty = if cond_ty.references_error() {
2917 fcx.write_ty(id, if_ty);
2920 // Check field access expressions
// Checks `base.field`: autoderefs the base looking for a struct with a field
// of that name; on failure, emits a tailored error (including a hint when a
// method of that name exists, and nearest-name suggestions for structs).
// (Excerpt: some interior lines are missing.)
2921 fn check_field<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
2922 expr: &'tcx hir::Expr,
2923 lvalue_pref: LvaluePreference,
2924 base: &'tcx hir::Expr,
2925 field: &Spanned<ast::Name>) {
2926 let tcx = fcx.ccx.tcx;
2927 check_expr_with_lvalue_pref(fcx, base, lvalue_pref);
2928 let expr_t = structurally_resolved_type(fcx, expr.span,
2930 // FIXME(eddyb) #12808 Integrate privacy into this auto-deref loop.
2931 let (_, autoderefs, field_ty) = autoderef(fcx,
2935 UnresolvedTypeAction::Error,
2939 ty::TyStruct(base_def, substs) => {
2940 debug!("struct named {:?}", base_t);
2941 base_def.struct_variant()
2942 .find_field_named(field.node)
2943 .map(|f| fcx.field_ty(expr.span, f, substs))
// Success path: record the field's type and any autoderef adjustments.
2950 fcx.write_ty(expr.id, field_ty);
2951 fcx.write_autoderef_adjustment(base.id, autoderefs);
// A method with this name exists: suggest calling it instead.
2957 if method::exists(fcx, field.span, field.node, expr_t, expr.id) {
2958 fcx.type_error_message(
2961 format!("attempted to take value of method `{}` on type \
2962 `{}`", field.node, actual)
2966 tcx.sess.fileline_help(field.span,
2967 "maybe a `()` to call it is missing? \
2968 If not, try an anonymous function");
2970 fcx.type_error_message(
2973 format!("attempted access of field `{}` on \
2974 type `{}`, but no field with that \
// For struct types, additionally suggest the closest field name.
2980 if let ty::TyStruct(def, _) = expr_t.sty {
2981 suggest_field_names(def.struct_variant(), field, tcx, vec![]);
2985 fcx.write_error(expr.id);
2988 // displays hints about the closest matches in field names
// Emits a "did you mean `...`?" help note for the variant field whose name
// has the smallest Levenshtein distance to the requested one. `skip` lists
// field names that must not be suggested (e.g. already-initialized fields).
// (Excerpt: some interior lines are missing.)
2989 fn suggest_field_names<'tcx>(variant: ty::VariantDef<'tcx>,
2990 field: &Spanned<ast::Name>,
2991 tcx: &ty::ctxt<'tcx>,
2992 skip : Vec<InternedString>) {
2993 let name = field.node.as_str();
2994 // only find fits with at least one matching letter
// Starting `best_dist` at `name.len()` caps how dissimilar a suggestion can be.
2995 let mut best_dist = name.len();
2996 let mut best = None;
2997 for elem in &variant.fields {
2998 let n = elem.name.as_str();
2999 // ignore already set fields
3000 if skip.iter().any(|x| *x == n) {
3003 // ignore private fields from non-local crates
3004 if variant.did.krate != LOCAL_CRATE && elem.vis != Visibility::Public {
3007 let dist = lev_distance(&n, &name);
3008 if dist < best_dist {
3013 if let Some(n) = best {
3014 tcx.sess.span_help(field.span,
3015 &format!("did you mean `{}`?", n));
3019 // Check tuple index expressions
// Checks `base.N`: autoderefs the base looking for a tuple or tuple struct
// with at least N+1 elements; errors distinguish an out-of-bounds index from
// a non-tuple base type. (Excerpt: some interior lines are missing.)
3020 fn check_tup_field<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
3021 expr: &'tcx hir::Expr,
3022 lvalue_pref: LvaluePreference,
3023 base: &'tcx hir::Expr,
3024 idx: codemap::Spanned<usize>) {
3025 check_expr_with_lvalue_pref(fcx, base, lvalue_pref);
3026 let expr_t = structurally_resolved_type(fcx, expr.span,
// Tracks whether the base was tuple-like, to pick the right error message.
3028 let mut tuple_like = false;
3029 // FIXME(eddyb) #12808 Integrate privacy into this auto-deref loop.
3030 let (_, autoderefs, field_ty) = autoderef(fcx,
3034 UnresolvedTypeAction::Error,
3038 ty::TyStruct(base_def, substs) => {
3039 tuple_like = base_def.struct_variant().is_tuple_struct();
3041 debug!("tuple struct named {:?}", base_t);
3042 base_def.struct_variant()
3045 .map(|f| fcx.field_ty(expr.span, f, substs))
3050 ty::TyTuple(ref v) => {
// Bounds check on the tuple arity.
3052 if idx.node < v.len() { Some(v[idx.node]) } else { None }
3059 fcx.write_ty(expr.id, field_ty);
3060 fcx.write_autoderef_adjustment(base.id, autoderefs);
3065 fcx.type_error_message(
3069 format!("attempted out-of-bounds tuple index `{}` on \
3074 format!("attempted tuple index `{}` on type `{}`, but the \
3075 type was not a tuple or tuple struct",
3082 fcx.write_error(expr.id);
// Reports an unknown field in a struct (or struct-variant) literal, wording
// the message differently for enum variants, then suggests the closest field
// name while excluding fields the user already wrote (`skip_fields`).
// (Excerpt: some interior lines, including some parameters, are missing.)
3085 fn report_unknown_field<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
3087 variant: ty::VariantDef<'tcx>,
3089 skip_fields: &[hir::Field]) {
3090 fcx.type_error_message(
3092 |actual| if let ty::TyEnum(..) = ty.sty {
3093 format!("struct variant `{}::{}` has no field named `{}`",
3094 actual, variant.name.as_str(), field.name.node)
3096 format!("structure `{}` has no field named `{}`",
3097 actual, field.name.node)
3101 // prevent all specified fields from being suggested
3102 let skip_fields = skip_fields.iter().map(|ref x| x.name.node.as_str());
3103 suggest_field_names(variant, &field.name, fcx.tcx(), skip_fields.collect());
// Type-checks the fields of a struct (or struct-variant) literal against the
// variant's declared fields: flags duplicates (E0062) and unknown fields, and
// — when `check_completeness` — missing fields (E0063).
// (Excerpt: some interior lines are missing.)
3106 fn check_expr_struct_fields<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
3109 variant: ty::VariantDef<'tcx>,
3110 ast_fields: &'tcx [hir::Field],
3111 check_completeness: bool) {
3112 let tcx = fcx.ccx.tcx;
3113 let substs = match adt_ty.sty {
3114 ty::TyStruct(_, substs) | ty::TyEnum(_, substs) => substs,
3115 _ => tcx.sess.span_bug(span, "non-ADT passed to check_expr_struct_fields")
// Start with every declared field "remaining"; each written field removes
// its entry, so leftovers at the end are the missing fields.
3118 let mut remaining_fields = FnvHashMap();
3119 for field in &variant.fields {
3120 remaining_fields.insert(field.name, field);
3123 let mut error_happened = false;
3125 // Typecheck each field.
3126 for field in ast_fields {
3127 let expected_field_type;
3129 if let Some(v_field) = remaining_fields.remove(&field.name.node) {
3130 expected_field_type = fcx.field_ty(field.span, v_field, substs);
3132 error_happened = true;
3133 expected_field_type = tcx.types.err;
// Already removed from `remaining_fields` but declared: it's a duplicate.
3134 if let Some(_) = variant.find_field_named(field.name.node) {
3135 span_err!(fcx.tcx().sess, field.name.span, E0062,
3136 "field `{}` specified more than once",
3139 report_unknown_field(fcx, adt_ty, variant, field, ast_fields);
3143 // Make sure to give a type to the field even if there's
3144 // an error, so we can continue typechecking
3145 check_expr_coercable_to_type(fcx, &*field.expr, expected_field_type);
3148 // Make sure the programmer specified all the fields.
3149 if check_completeness &&
3151 !remaining_fields.is_empty()
3153 span_err!(tcx.sess, span, E0063,
3154 "missing field{}: {}",
3155 if remaining_fields.len() == 1 {""} else {"s"},
3156 remaining_fields.keys()
3157 .map(|n| format!("`{}`", n))
3158 .collect::<Vec<_>>()
// Error-recovery path for struct literals whose path failed to resolve:
// records `err` for the whole expression but still checks every field
// expression (and the functional-update base) so later passes don't ICE on
// missing node types. (Excerpt: some interior lines are missing.)
3164 fn check_struct_fields_on_error<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
3166 fields: &'tcx [hir::Field],
3167 base_expr: &'tcx Option<P<hir::Expr>>) {
3168 // Make sure to still write the types
3169 // otherwise we might ICE
3170 fcx.write_error(id);
3171 for field in fields {
3172 check_expr(fcx, &*field.expr);
3175 Some(ref base) => check_expr(fcx, &**base),
// Checks a struct literal `Path { fields, ..base }`: resolves the path to an
// ADT variant (E0071 when it doesn't name a structure), checks the fields,
// and checks the functional-update base (rejected for enums with E0436).
// (Excerpt: some interior lines are missing.)
3180 fn check_expr_struct<'a, 'tcx>(fcx: &FnCtxt<'a,'tcx>,
3183 fields: &'tcx [hir::Field],
3184 base_expr: &'tcx Option<P<hir::Expr>>)
3186 let tcx = fcx.tcx();
3188 // Find the relevant variant
3189 let def = lookup_full_def(tcx, path.span, expr.id);
3190 if def == def::DefErr {
// Resolution already failed: recover without cascading errors.
3191 check_struct_fields_on_error(fcx, expr.id, fields, base_expr);
3194 let (adt, variant) = match fcx.def_struct_variant(def, path.span) {
3195 Some((adt, variant)) => (adt, variant),
3197 span_err!(fcx.tcx().sess, path.span, E0071,
3198 "`{}` does not name a structure",
3199 pprust::path_to_string(path));
3200 check_struct_fields_on_error(fcx, expr.id, fields, base_expr);
3205 let expr_ty = fcx.instantiate_type(def.def_id(), path);
3206 fcx.write_ty(expr.id, expr_ty);
// Completeness (missing-field) checking only applies without `..base`.
3208 check_expr_struct_fields(fcx, expr_ty, expr.span, variant, fields,
3209 base_expr.is_none());
3211 if let &Some(ref base_expr) = base_expr {
// The base must have the same type as the literal being built.
3212 check_expr_has_type(fcx, base_expr, expr_ty);
3213 if adt.adt_kind() == ty::AdtKind::Enum {
3214 span_err!(tcx.sess, base_expr.span, E0436,
3215 "functional record update syntax requires a struct");
3220 type ExprCheckerWithTy = fn(&FnCtxt, &hir::Expr, Ty);
3222 let tcx = fcx.ccx.tcx;
3225 hir::ExprBox(ref subexpr) => {
3226 let expected_inner = expected.to_option(fcx).map_or(NoExpectation, |ty| {
3228 ty::TyBox(ty) => Expectation::rvalue_hint(tcx, ty),
3232 check_expr_with_expectation(fcx, subexpr, expected_inner);
3233 let referent_ty = fcx.expr_ty(&**subexpr);
3234 fcx.write_ty(id, tcx.mk_box(referent_ty));
3237 hir::ExprLit(ref lit) => {
3238 let typ = check_lit(fcx, &**lit, expected);
3239 fcx.write_ty(id, typ);
3241 hir::ExprBinary(op, ref lhs, ref rhs) => {
3242 op::check_binop(fcx, expr, op, lhs, rhs);
3244 hir::ExprAssignOp(op, ref lhs, ref rhs) => {
3245 op::check_binop_assign(fcx, expr, op, lhs, rhs);
3247 hir::ExprUnary(unop, ref oprnd) => {
3248 let expected_inner = match unop {
3249 hir::UnNot | hir::UnNeg => {
3256 let lvalue_pref = match unop {
3257 hir::UnDeref => lvalue_pref,
3260 check_expr_with_expectation_and_lvalue_pref(
3261 fcx, &**oprnd, expected_inner, lvalue_pref);
3262 let mut oprnd_t = fcx.expr_ty(&**oprnd);
3264 if !oprnd_t.references_error() {
3267 oprnd_t = structurally_resolved_type(fcx, expr.span, oprnd_t);
3268 oprnd_t = match oprnd_t.builtin_deref(true, NoPreference) {
3270 None => match try_overloaded_deref(fcx, expr.span,
3271 Some(MethodCall::expr(expr.id)),
3272 Some(&**oprnd), oprnd_t, lvalue_pref) {
3275 fcx.type_error_message(expr.span, |actual| {
3276 format!("type `{}` cannot be \
3277 dereferenced", actual)
3285 oprnd_t = structurally_resolved_type(fcx, oprnd.span,
3287 if !(oprnd_t.is_integral() || oprnd_t.sty == ty::TyBool) {
3288 oprnd_t = op::check_user_unop(fcx, "!", "not",
3289 tcx.lang_items.not_trait(),
3290 expr, &**oprnd, oprnd_t, unop);
3294 oprnd_t = structurally_resolved_type(fcx, oprnd.span,
3296 if !(oprnd_t.is_integral() || oprnd_t.is_fp()) {
3297 oprnd_t = op::check_user_unop(fcx, "-", "neg",
3298 tcx.lang_items.neg_trait(),
3299 expr, &**oprnd, oprnd_t, unop);
3304 fcx.write_ty(id, oprnd_t);
3306 hir::ExprAddrOf(mutbl, ref oprnd) => {
3307 let hint = expected.only_has_type(fcx).map_or(NoExpectation, |ty| {
3309 ty::TyRef(_, ref mt) | ty::TyRawPtr(ref mt) => {
3310 if fcx.tcx().expr_is_lval(&**oprnd) {
3311 // Lvalues may legitimately have unsized types.
3312 // For example, dereferences of a fat pointer and
3313 // the last field of a struct can be unsized.
3314 ExpectHasType(mt.ty)
3316 Expectation::rvalue_hint(tcx, mt.ty)
3322 let lvalue_pref = LvaluePreference::from_mutbl(mutbl);
3323 check_expr_with_expectation_and_lvalue_pref(fcx,
3328 let tm = ty::TypeAndMut { ty: fcx.expr_ty(&**oprnd), mutbl: mutbl };
3329 let oprnd_t = if tm.ty.references_error() {
3332 // Note: at this point, we cannot say what the best lifetime
3333 // is to use for resulting pointer. We want to use the
3334 // shortest lifetime possible so as to avoid spurious borrowck
3335 // errors. Moreover, the longest lifetime will depend on the
3336 // precise details of the value whose address is being taken
3337 // (and how long it is valid), which we don't know yet until type
3338 // inference is complete.
3340 // Therefore, here we simply generate a region variable. The
3341 // region inferencer will then select the ultimate value.
3342 // Finally, borrowck is charged with guaranteeing that the
3343 // value whose address was taken can actually be made to live
3344 // as long as it needs to live.
3345 let region = fcx.infcx().next_region_var(infer::AddrOfRegion(expr.span));
3346 tcx.mk_ref(tcx.mk_region(region), tm)
3348 fcx.write_ty(id, oprnd_t);
3350 hir::ExprPath(ref maybe_qself, ref path) => {
3351 let opt_self_ty = maybe_qself.as_ref().map(|qself| {
3352 fcx.to_ty(&qself.ty)
3355 let path_res = if let Some(&d) = tcx.def_map.borrow().get(&id) {
3357 } else if let Some(hir::QSelf { position: 0, .. }) = *maybe_qself {
3358 // Create some fake resolution that can't possibly be a type.
3359 def::PathResolution {
3360 base_def: def::DefMod(tcx.map.local_def_id(ast::CRATE_NODE_ID)),
3361 last_private: LastMod(AllPublic),
3362 depth: path.segments.len()
3365 tcx.sess.span_bug(expr.span,
3366 &format!("unbound path {:?}", expr))
3369 if let Some((opt_ty, segments, def)) =
3370 resolve_ty_and_def_ufcs(fcx, path_res, opt_self_ty, path,
3371 expr.span, expr.id) {
3372 if def != def::DefErr {
3373 let (scheme, predicates) = type_scheme_and_predicates_for_def(fcx,
3376 instantiate_path(fcx,
3385 fcx.write_ty(id, fcx.tcx().types.err);
3389 // We always require that the type provided as the value for
3390 // a type parameter outlives the moment of instantiation.
3391 fcx.opt_node_ty_substs(expr.id, |item_substs| {
3392 fcx.add_wf_bounds(&item_substs.substs, expr);
3395 hir::ExprInlineAsm(ref ia) => {
3396 for &(_, ref input) in &ia.inputs {
3397 check_expr(fcx, &**input);
3399 for &(_, ref out, _) in &ia.outputs {
3400 check_expr(fcx, &**out);
3404 hir::ExprBreak(_) => { fcx.write_ty(id, fcx.infcx().next_diverging_ty_var()); }
3405 hir::ExprAgain(_) => { fcx.write_ty(id, fcx.infcx().next_diverging_ty_var()); }
3406 hir::ExprRet(ref expr_opt) => {
3408 ty::FnConverging(result_type) => {
3411 if let Err(_) = fcx.mk_eqty(false, TypeOrigin::Misc(expr.span),
3412 result_type, fcx.tcx().mk_nil()) {
3413 span_err!(tcx.sess, expr.span, E0069,
3414 "`return;` in a function whose return type is \
3418 check_expr_coercable_to_type(fcx, &**e, result_type);
3422 ty::FnDiverging => {
3423 if let Some(ref e) = *expr_opt {
3424 check_expr(fcx, &**e);
3426 span_err!(tcx.sess, expr.span, E0166,
3427 "`return` in a function declared as diverging");
3430 fcx.write_ty(id, fcx.infcx().next_diverging_ty_var());
3432 hir::ExprAssign(ref lhs, ref rhs) => {
3433 check_expr_with_lvalue_pref(fcx, &**lhs, PreferMutLvalue);
3435 let tcx = fcx.tcx();
3436 if !tcx.expr_is_lval(&**lhs) {
3437 span_err!(tcx.sess, expr.span, E0070,
3438 "invalid left-hand side expression");
3441 let lhs_ty = fcx.expr_ty(&**lhs);
3442 check_expr_coercable_to_type(fcx, &**rhs, lhs_ty);
3443 let rhs_ty = fcx.expr_ty(&**rhs);
3445 fcx.require_expr_have_sized_type(&**lhs, traits::AssignmentLhsSized);
3447 if lhs_ty.references_error() || rhs_ty.references_error() {
3448 fcx.write_error(id);
3453 hir::ExprIf(ref cond, ref then_blk, ref opt_else_expr) => {
3454 check_then_else(fcx, &**cond, &**then_blk, opt_else_expr.as_ref().map(|e| &**e),
3455 id, expr.span, expected);
3457 hir::ExprWhile(ref cond, ref body, _) => {
3458 check_expr_has_type(fcx, &**cond, tcx.types.bool);
3459 check_block_no_value(fcx, &**body);
3460 let cond_ty = fcx.expr_ty(&**cond);
3461 let body_ty = fcx.node_ty(body.id);
3462 if cond_ty.references_error() || body_ty.references_error() {
3463 fcx.write_error(id);
3469 hir::ExprLoop(ref body, _) => {
3470 check_block_no_value(fcx, &**body);
3471 if !may_break(tcx, expr.id, &**body) {
3472 fcx.write_ty(id, fcx.infcx().next_diverging_ty_var());
3477 hir::ExprMatch(ref discrim, ref arms, match_src) => {
3478 _match::check_match(fcx, expr, &**discrim, arms, expected, match_src);
3480 hir::ExprClosure(capture, ref decl, ref body) => {
3481 closure::check_expr_closure(fcx, expr, capture, &**decl, &**body, expected);
3483 hir::ExprBlock(ref b) => {
3484 check_block_with_expected(fcx, &**b, expected);
3485 fcx.write_ty(id, fcx.node_ty(b.id));
3487 hir::ExprCall(ref callee, ref args) => {
3488 callee::check_call(fcx, expr, &**callee, &args[..], expected);
3490 // we must check that return type of called functions is WF:
3491 let ret_ty = fcx.expr_ty(expr);
3492 fcx.register_wf_obligation(ret_ty, expr.span, traits::MiscObligation);
3494 hir::ExprMethodCall(name, ref tps, ref args) => {
3495 check_method_call(fcx, expr, name, &args[..], &tps[..], expected, lvalue_pref);
3496 let arg_tys = args.iter().map(|a| fcx.expr_ty(&**a));
3497 let args_err = arg_tys.fold(false, |rest_err, a| rest_err || a.references_error());
3499 fcx.write_error(id);
3502 hir::ExprCast(ref e, ref t) => {
3503 if let hir::TyFixedLengthVec(_, ref count_expr) = t.node {
3504 check_expr_with_hint(fcx, &**count_expr, tcx.types.usize);
3507 // Find the type of `e`. Supply hints based on the type we are casting to,
3509 let t_cast = fcx.to_ty(t);
3510 let t_cast = structurally_resolved_type(fcx, expr.span, t_cast);
3511 check_expr_with_expectation(fcx, e, ExpectCastableToType(t_cast));
3512 let t_expr = fcx.expr_ty(e);
3514 // Eagerly check for some obvious errors.
3515 if t_expr.references_error() {
3516 fcx.write_error(id);
3517 } else if !fcx.type_is_known_to_be_sized(t_cast, expr.span) {
3518 report_cast_to_unsized_type(fcx, expr.span, t.span, e.span, t_cast, t_expr, id);
3520 // Write a type for the whole expression, assuming everything is going
3522 fcx.write_ty(id, t_cast);
3524 // Defer other checks until we're done type checking.
3525 let mut deferred_cast_checks = fcx.inh.deferred_cast_checks.borrow_mut();
3526 let cast_check = cast::CastCheck::new((**e).clone(), t_expr, t_cast, expr.span);
3527 deferred_cast_checks.push(cast_check);
3530 hir::ExprVec(ref args) => {
3531 let uty = expected.to_option(fcx).and_then(|uty| {
3533 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3538 let typ = match uty {
3541 check_expr_coercable_to_type(fcx, &**e, uty);
3546 let t: Ty = fcx.infcx().next_ty_var();
3548 check_expr_has_type(fcx, &**e, t);
3553 let typ = tcx.mk_array(typ, args.len());
3554 fcx.write_ty(id, typ);
3556 hir::ExprRepeat(ref element, ref count_expr) => {
3557 check_expr_has_type(fcx, &**count_expr, tcx.types.usize);
3558 let count = fcx.tcx().eval_repeat_count(&**count_expr);
3560 let uty = match expected {
3561 ExpectHasType(uty) => {
3563 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3570 let (element_ty, t) = match uty {
3572 check_expr_coercable_to_type(fcx, &**element, uty);
3576 let t: Ty = fcx.infcx().next_ty_var();
3577 check_expr_has_type(fcx, &**element, t);
3578 (fcx.expr_ty(&**element), t)
3583 // For [foo, ..n] where n > 1, `foo` must have
3585 fcx.require_type_meets(
3592 if element_ty.references_error() {
3593 fcx.write_error(id);
3595 let t = tcx.mk_array(t, count);
3596 fcx.write_ty(id, t);
3599 hir::ExprTup(ref elts) => {
3600 let flds = expected.only_has_type(fcx).and_then(|ty| {
3602 ty::TyTuple(ref flds) => Some(&flds[..]),
3606 let mut err_field = false;
3608 let elt_ts = elts.iter().enumerate().map(|(i, e)| {
3609 let t = match flds {
3610 Some(ref fs) if i < fs.len() => {
3612 check_expr_coercable_to_type(fcx, &**e, ety);
3616 check_expr_with_expectation(fcx, &**e, NoExpectation);
3620 err_field = err_field || t.references_error();
3624 fcx.write_error(id);
3626 let typ = tcx.mk_tup(elt_ts);
3627 fcx.write_ty(id, typ);
3630 hir::ExprStruct(ref path, ref fields, ref base_expr) => {
3631 check_expr_struct(fcx, expr, path, fields, base_expr);
3633 fcx.require_expr_have_sized_type(expr, traits::StructInitializerSized);
3635 hir::ExprField(ref base, ref field) => {
3636 check_field(fcx, expr, lvalue_pref, &**base, field);
3638 hir::ExprTupField(ref base, idx) => {
3639 check_tup_field(fcx, expr, lvalue_pref, &**base, idx);
3641 hir::ExprIndex(ref base, ref idx) => {
3642 check_expr_with_lvalue_pref(fcx, &**base, lvalue_pref);
3643 check_expr(fcx, &**idx);
3645 let base_t = fcx.expr_ty(&**base);
3646 let idx_t = fcx.expr_ty(&**idx);
3648 if base_t.references_error() {
3649 fcx.write_ty(id, base_t);
3650 } else if idx_t.references_error() {
3651 fcx.write_ty(id, idx_t);
3653 let base_t = structurally_resolved_type(fcx, expr.span, base_t);
3654 match lookup_indexing(fcx, expr, base, base_t, idx_t, lvalue_pref) {
3655 Some((index_ty, element_ty)) => {
3656 let idx_expr_ty = fcx.expr_ty(idx);
3657 demand::eqtype(fcx, expr.span, index_ty, idx_expr_ty);
3658 fcx.write_ty(id, element_ty);
3661 check_expr_has_type(fcx, &**idx, fcx.tcx().types.err);
3662 fcx.type_error_message(
3665 format!("cannot index a value of type `{}`",
3670 fcx.write_ty(id, fcx.tcx().types.err);
3675 hir::ExprRange(ref start, ref end) => {
3676 let t_start = start.as_ref().map(|e| {
3677 check_expr(fcx, &**e);
3680 let t_end = end.as_ref().map(|e| {
3681 check_expr(fcx, &**e);
3685 let idx_type = match (t_start, t_end) {
3686 (Some(ty), None) | (None, Some(ty)) => {
3689 (Some(t_start), Some(t_end)) if (t_start.references_error() ||
3690 t_end.references_error()) => {
3691 Some(fcx.tcx().types.err)
3693 (Some(t_start), Some(t_end)) => {
3694 Some(infer::common_supertype(fcx.infcx(),
3695 TypeOrigin::RangeExpression(expr.span),
3703 // Note that we don't check the type of start/end satisfy any
3704 // bounds because right now the range structs do not have any. If we add
3705 // some bounds, then we'll need to check `t_start` against them here.
3707 let range_type = match idx_type {
3708 Some(idx_type) if idx_type.references_error() => {
3712 // Find the did from the appropriate lang item.
3713 let did = match (start, end) {
3714 (&Some(_), &Some(_)) => tcx.lang_items.range_struct(),
3715 (&Some(_), &None) => tcx.lang_items.range_from_struct(),
3716 (&None, &Some(_)) => tcx.lang_items.range_to_struct(),
3718 tcx.sess.span_bug(expr.span, "full range should be dealt with above")
3722 if let Some(did) = did {
3723 let def = tcx.lookup_adt_def(did);
3724 let predicates = tcx.lookup_predicates(did);
3725 let substs = Substs::new_type(vec![idx_type], vec![]);
3726 let bounds = fcx.instantiate_bounds(expr.span, &substs, &predicates);
3727 fcx.add_obligations_for_parameters(
3728 traits::ObligationCause::new(expr.span,
3730 traits::ItemObligation(did)),
3733 tcx.mk_struct(def, tcx.mk_substs(substs))
3735 span_err!(tcx.sess, expr.span, E0236, "no lang item for range syntax");
3740 // Neither start nor end => RangeFull
3741 if let Some(did) = tcx.lang_items.range_full_struct() {
3743 tcx.lookup_adt_def(did),
3744 tcx.mk_substs(Substs::empty())
3747 span_err!(tcx.sess, expr.span, E0237, "no lang item for range syntax");
3753 fcx.write_ty(id, range_type);
3758 debug!("type of expr({}) {} is...", expr.id,
3759 pprust::expr_to_string(expr));
3760 debug!("... {:?}, expected is {:?}",
// Resolves the remaining, not-yet-resolved portion of a path (the UFCS-style
// `Type::item` case). Returns the optional self type, the path segments that
// name the item, and the final definition; `None` indicates resolution failed
// and an error has already been reported.
// NOTE(review): this listing is elided (gaps in the embedded line numbers),
// so some parameters and match arms are not visible here.
3767 pub fn resolve_ty_and_def_ufcs<'a, 'b, 'tcx>(fcx: &FnCtxt<'b, 'tcx>,
3768 path_res: def::PathResolution,
3769 opt_self_ty: Option<Ty<'tcx>>,
3770 path: &'a hir::Path,
3772 node_id: ast::NodeId)
3773 -> Option<(Option<Ty<'tcx>>,
3774 &'a [hir::PathSegment],
3778 // Associated constants can't depend on generic types.
// Emits E0329 and records an error type for `node_id` when an associated
// const's type mentions type parameters or `Self`; returns true on error.
3779 fn have_disallowed_generic_consts<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
3783 node_id: ast::NodeId) -> bool {
3785 def::DefAssociatedConst(..) => {
3786 if ty.has_param_types() || ty.has_self_ty() {
3787 span_err!(fcx.sess(), span, E0329,
3788 "Associated consts cannot depend \
3789 on type parameters or Self.");
3790 fcx.write_error(node_id);
3799 // If fully resolved already, we don't have to do anything.
3800 if path_res.depth == 0 {
3801 if let Some(ty) = opt_self_ty {
3802 if have_disallowed_generic_consts(fcx, path_res.full_def(), ty,
3807 Some((opt_self_ty, &path.segments, path_res.base_def))
// Partially resolved: convert the type portion of the path to a `Ty`,
// then resolve the final segment as a UFCS item on that type.
3809 let mut def = path_res.base_def;
3810 let ty_segments = path.segments.split_last().unwrap().1;
3811 let base_ty_end = path.segments.len() - path_res.depth;
3812 let ty = astconv::finish_resolving_def_to_ty(fcx, fcx, span,
3813 PathParamMode::Optional,
3816 &ty_segments[..base_ty_end],
3817 &ty_segments[base_ty_end..]);
3818 let item_segment = path.segments.last().unwrap();
3819 let item_name = item_segment.identifier.name;
3820 match method::resolve_ufcs(fcx, span, item_name, ty, node_id) {
3822 if have_disallowed_generic_consts(fcx, def, ty, span, node_id) {
3825 // Write back the new resolution.
3826 fcx.ccx.tcx.def_map.borrow_mut()
3827 .insert(node_id, def::PathResolution {
3829 last_private: path_res.last_private.or(lp),
3832 Some((Some(ty), slice::ref_slice(item_segment), def))
// Resolution failed: report the method/const lookup error and record
// an error type so downstream checks don't cascade.
3835 method::report_error(fcx, span, ty,
3836 item_name, None, error);
3837 fcx.write_error(node_id);
3844 impl<'tcx> Expectation<'tcx> {
3845 /// Provide an expectation for an rvalue expression given an *optional*
3846 /// hint, which is not required for type safety (the resulting type might
3847 /// be checked higher up, as is the case with `&expr` and `box expr`), but
3848 /// is useful in determining the concrete type.
3850 /// The primary use case is where the expected type is a fat pointer,
3851 /// like `&[isize]`. For example, consider the following statement:
3853 /// let x: &[isize] = &[1, 2, 3];
3855 /// In this case, the expected type for the `&[1, 2, 3]` expression is
3856 /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
3857 /// expectation `ExpectHasType([isize])`, that would be too strong --
3858 /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
3859 /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
3860 /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
3861 /// which still is useful, because it informs integer literals and the like.
3862 /// See the test case `test/run-pass/coerce-expect-unsized.rs` and #20169
3863 /// for examples of where this comes up.
3864 fn rvalue_hint(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> {
// `struct_tail` peels off struct wrappers to find the innermost type;
// only slice/trait-object tails get the weaker "rvalue-like" hint.
3865 match tcx.struct_tail(ty).sty {
3866 ty::TySlice(_) | ty::TyTrait(..) => {
3867 ExpectRvalueLikeUnsized(ty)
3869 _ => ExpectHasType(ty)
3873 // Resolves `expected` by a single level if it is a variable. If
3874 // there is no expected type or resolution is not possible (e.g.,
3875 // no constraints yet present), just returns `None`.
3876 fn resolve<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> {
// Each variant is rebuilt around the (possibly) resolved inner type.
3881 ExpectCastableToType(t) => {
3882 ExpectCastableToType(
3883 fcx.infcx().resolve_type_vars_if_possible(&t))
3885 ExpectHasType(t) => {
3887 fcx.infcx().resolve_type_vars_if_possible(&t))
3889 ExpectRvalueLikeUnsized(t) => {
3890 ExpectRvalueLikeUnsized(
3891 fcx.infcx().resolve_type_vars_if_possible(&t))
// Returns the expected type under *any* expectation variant, or `None`
// when there is no expectation at all.
3896 fn to_option<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> {
3897 match self.resolve(fcx) {
3898 NoExpectation => None,
3899 ExpectCastableToType(ty) |
3901 ExpectRvalueLikeUnsized(ty) => Some(ty),
// Stricter than `to_option`: yields a type only for the hard
// `ExpectHasType` requirement, ignoring mere hints.
3905 fn only_has_type<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> {
3906 match self.resolve(fcx) {
3907 ExpectHasType(ty) => Some(ty),
// Type-checks the initializer expression of a `let` declaration against the
// type of the declared local. Patterns containing `ref`/`ref mut` bindings
// require *exact* type equality rather than coercion (see the comment below
// and issue #23116); plain bindings allow the initializer to coerce.
3913 pub fn check_decl_initializer<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
3914 local: &'tcx hir::Local,
3915 init: &'tcx hir::Expr)
3917 let ref_bindings = fcx.tcx().pat_contains_ref_binding(&local.pat);
3919 let local_ty = fcx.local_ty(init.span, local.id);
3920 if let Some(m) = ref_bindings {
3921 // Somewhat subtle: if we have a `ref` binding in the pattern,
3922 // we want to avoid introducing coercions for the RHS. This is
3923 // both because it helps preserve sanity and, in the case of
3924 // ref mut, for soundness (issue #23116). In particular, in
3925 // the latter case, we need to be clear that the type of the
3926 // referent for the reference that results is *equal to* the
3927 // type of the lvalue it is referencing, and not some
3928 // supertype thereof.
3929 check_expr_with_lvalue_pref(fcx, init, LvaluePreference::from_mutbl(m));
3930 let init_ty = fcx.expr_ty(init);
3931 demand::eqtype(fcx, init.span, init_ty, local_ty);
3933 check_expr_coercable_to_type(fcx, init, local_ty)
// Type-checks a complete `let` declaration: records the local's type, checks
// the optional initializer, and checks the binding pattern against that type.
// Errors from either the initializer or the pattern are propagated onto the
// local's node type so later passes see an error type rather than garbage.
3937 pub fn check_decl_local<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, local: &'tcx hir::Local) {
3938 let tcx = fcx.ccx.tcx;
3940 let t = fcx.local_ty(local.span, local.id);
3941 fcx.write_ty(local.id, t);
3943 if let Some(ref init) = local.init {
3944 check_decl_initializer(fcx, local, &**init);
3945 let init_ty = fcx.expr_ty(&**init);
3946 if init_ty.references_error() {
3947 fcx.write_ty(local.id, init_ty);
// Check the pattern itself against the local's type.
3951 let pcx = pat_ctxt {
3953 map: pat_id_map(&tcx.def_map, &*local.pat),
3955 _match::check_pat(&pcx, &*local.pat, t);
3956 let pat_ty = fcx.node_ty(local.pat.id);
3957 if pat_ty.references_error() {
3958 fcx.write_ty(local.id, pat_ty);
// Type-checks one statement. Tracks two flags while checking its contents:
// `saw_bot` (some contained type diverges, i.e. is `!`-like) and `saw_err`
// (some contained type is an error). The statement node's own type is then
// written as a fresh diverging variable, the error type, or `()` accordingly,
// which is what drives unreachable-code detection in `check_block_with_expected`.
3962 pub fn check_stmt<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, stmt: &'tcx hir::Stmt) {
3964 let mut saw_bot = false;
3965 let mut saw_err = false;
3967 hir::StmtDecl(ref decl, id) => {
3970 hir::DeclLocal(ref l) => {
3971 check_decl_local(fcx, &**l);
3972 let l_t = fcx.node_ty(l.id);
3973 saw_bot = saw_bot || fcx.infcx().type_var_diverges(l_t);
3974 saw_err = saw_err || l_t.references_error();
3976 hir::DeclItem(_) => {/* ignore for now */ }
3979 hir::StmtExpr(ref expr, id) => {
3981 // Check with expected type of ()
3982 check_expr_has_type(fcx, &**expr, fcx.tcx().mk_nil());
3983 let expr_ty = fcx.expr_ty(&**expr);
3984 saw_bot = saw_bot || fcx.infcx().type_var_diverges(expr_ty);
3985 saw_err = saw_err || expr_ty.references_error();
// Semicolon statements discard their value, so no `()` requirement here.
3987 hir::StmtSemi(ref expr, id) => {
3989 check_expr(fcx, &**expr);
3990 let expr_ty = fcx.expr_ty(&**expr);
3991 saw_bot |= fcx.infcx().type_var_diverges(expr_ty);
3992 saw_err |= expr_ty.references_error();
// Record the statement's type: diverging > error > ().
3996 fcx.write_ty(node_id, fcx.infcx().next_diverging_ty_var());
3999 fcx.write_error(node_id);
4002 fcx.write_nil(node_id)
// Checks a block in a position where no value is expected (e.g. a `while` or
// `loop` body): the block is checked with expectation `()`, and unless it
// already errored, its type is required to be a subtype of `()`.
4006 pub fn check_block_no_value<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, blk: &'tcx hir::Block) {
4007 check_block_with_expected(fcx, blk, ExpectHasType(fcx.tcx().mk_nil()));
4008 let blkty = fcx.node_ty(blk.id);
4009 if blkty.references_error() {
4010 fcx.write_error(blk.id);
4012 let nilty = fcx.tcx().mk_nil();
4013 demand::suptype(fcx, blk.span, nilty, blkty);
// Checks all statements of a block plus its optional tail expression, under
// the given expectation. Also emits the UNREACHABLE_CODE lint for statements
// or a tail expression that follow a diverging statement. The block's type is
// the tail expression's type, a diverging variable, an error, or `()`.
// NOTE(review): elided listing — the tail-expression `Some(e)` arm and some
// closing braces are not visible here.
4017 fn check_block_with_expected<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4018 blk: &'tcx hir::Block,
4019 expected: Expectation<'tcx>) {
// Push the block's unsafety state (e.g. entering an `unsafe` block),
// remembering the previous state so it can be restored at the end.
4021 let mut fcx_ps = fcx.ps.borrow_mut();
4022 let unsafety_state = fcx_ps.recurse(blk);
4023 replace(&mut *fcx_ps, unsafety_state)
4026 let mut warned = false;
4027 let mut any_diverges = false;
4028 let mut any_err = false;
4029 for s in &blk.stmts {
4031 let s_id = ::rustc_front::util::stmt_id(s);
4032 let s_ty = fcx.node_ty(s_id);
// Warn once per block on the first lintable statement after divergence.
4033 if any_diverges && !warned && match s.node {
4034 hir::StmtDecl(ref decl, _) => {
4036 hir::DeclLocal(_) => true,
4040 hir::StmtExpr(_, _) | hir::StmtSemi(_, _) => true,
4045 .add_lint(lint::builtin::UNREACHABLE_CODE,
4048 "unreachable statement".to_string());
4051 any_diverges = any_diverges || fcx.infcx().type_var_diverges(s_ty);
4052 any_err = any_err || s_ty.references_error();
// No tail expression: block evaluates to error, `!`, or `()`.
4055 None => if any_err {
4056 fcx.write_error(blk.id);
4057 } else if any_diverges {
4058 fcx.write_ty(blk.id, fcx.infcx().next_diverging_ty_var());
4060 fcx.write_nil(blk.id);
// Tail expression present: it is unreachable if earlier statements diverged.
4063 if any_diverges && !warned {
4067 .add_lint(lint::builtin::UNREACHABLE_CODE,
4070 "unreachable expression".to_string());
// A hard `ExpectHasType` allows the tail expression to coerce; other
// expectations are just propagated as hints.
4072 let ety = match expected {
4073 ExpectHasType(ety) => {
4074 check_expr_coercable_to_type(fcx, &**e, ety);
4078 check_expr_with_expectation(fcx, &**e, expected);
4084 fcx.write_error(blk.id);
4085 } else if any_diverges {
4086 fcx.write_ty(blk.id, fcx.infcx().next_diverging_ty_var());
4088 fcx.write_ty(blk.id, ety);
// Restore the unsafety state saved on entry.
4093 *fcx.ps.borrow_mut() = prev;
4096 /// Checks a constant appearing in a type. At the moment this is just the
4097 /// length expression in a fixed-length vector, but someday it might be
4098 /// extended to type-level numeric literals.
4099 fn check_const_in_type<'a,'tcx>(ccx: &'a CrateCtxt<'a,'tcx>,
4100 expr: &'tcx hir::Expr,
4101 expected_type: Ty<'tcx>) {
// Such constants are checked in a fresh, static (item-level) inference
// context, independent of any enclosing function body.
4102 let tables = RefCell::new(ty::Tables::empty());
4103 let inh = static_inherited_fields(ccx, &tables);
4104 let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(expected_type), expr.id);
4105 check_const_with_ty(&fcx, expr.span, expr, expected_type);
// Type-checks the body of a `const`/`static` item `e` whose item id is `id`:
// builds a fresh static inference context and checks the body against the
// item's declared type as recorded in the item-type table.
// NOTE(review): elided listing — some parameters of this fn are not visible.
4108 fn check_const<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
4112 let tables = RefCell::new(ty::Tables::empty());
4113 let inh = static_inherited_fields(ccx, &tables);
4114 let rty = ccx.tcx.node_id_to_type(id);
4115 let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(rty), e.id);
4116 let declty = fcx.ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(id)).ty;
4117 check_const_with_ty(&fcx, sp, e, declty);
// Core routine for checking a constant expression `e` against its declared
// type `declty`: checks and coerces the expression, then runs the usual
// post-inference passes (obligation selection, closure/upvar analysis,
// regionck, writeback) on the constant's body.
4120 fn check_const_with_ty<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4124 // Gather locals in statics (because of block expressions).
4125 // This is technically unnecessary because locals in static items are forbidden,
4126 // but prevents type checking from blowing up before const checking can properly
4128 GatherLocalsVisitor { fcx: fcx }.visit_expr(e);
4130 check_expr_with_hint(fcx, e, declty);
4131 demand::coerce(fcx, e.span, declty, e);
// Resolve remaining trait obligations; defaults are applied before the
// final select-or-error so unconstrained variables get their fallbacks.
4133 fcx.select_all_obligations_and_apply_defaults();
4134 upvar::closure_analyze_const(&fcx, e);
4135 fcx.select_obligations_where_possible();
4137 fcx.select_all_obligations_or_error();
// Region-check and write the final resolved types back into the tables.
4139 regionck::regionck_expr(fcx, e);
4140 writeback::resolve_type_vars_in_expr(fcx, e);
4143 /// Checks whether a type can be represented in memory. In particular, it
4144 /// identifies types that contain themselves without indirection through a
4145 /// pointer, which would mean their size is unbounded.
// Returns true when the type is representable; on a self-recursive type it
// reports E0072 (plus a help note suggesting boxing) and returns false.
4146 pub fn check_representable(tcx: &ty::ctxt,
4148 item_id: ast::NodeId,
4149 designation: &str) -> bool {
4150 let rty = tcx.node_id_to_type(item_id);
4152 // Check that it is possible to represent this type. This call identifies
4153 // (1) types that contain themselves and (2) types that contain a different
4154 // recursive type. It is only necessary to throw an error on those that
4155 // contain themselves. For case 2, there must be an inner type that will be
4156 // caught by case 1.
4157 match rty.is_representable(tcx, sp) {
4158 Representability::SelfRecursive => {
4159 span_err!(tcx.sess, sp, E0072, "invalid recursive {} type", designation);
4160 tcx.sess.fileline_help(
4161 sp, "wrap the inner value in a box to make it representable");
4164 Representability::Representable | Representability::ContainsRecursive => (),
// Validates a SIMD struct definition: it must be a struct, non-empty (E0075),
// with all fields of the same type (E0076), and that element type must be a
// type parameter or a machine type (E0077).
4169 pub fn check_simd(tcx: &ty::ctxt, sp: Span, id: ast::NodeId) {
4170 let t = tcx.node_id_to_type(id);
4172 ty::TyStruct(def, substs) => {
4173 let fields = &def.struct_variant().fields;
4174 if fields.is_empty() {
4175 span_err!(tcx.sess, sp, E0075, "SIMD vector cannot be empty");
// Homogeneity: every field must have the same type as the first.
4178 let e = fields[0].ty(tcx, substs);
4179 if !fields.iter().all(|f| f.ty(tcx, substs) == e) {
4180 span_err!(tcx.sess, sp, E0076, "SIMD vector should be homogeneous");
4184 ty::TyParam(_) => { /* struct<T>(T, T, T, T) is ok */ }
4185 _ if e.is_machine() => { /* struct(u8, u8, u8, u8) is ok */ }
4187 span_err!(tcx.sess, sp, E0077,
4188 "SIMD vector element type should be machine type");
// Checks an enum definition: validates the `#[repr]` hint against the number
// of variants (E0083/E0084), checks each explicit discriminant expression,
// detects duplicate (E0081) and out-of-range (E0082) discriminant values, and
// finally checks the enum for representability.
// NOTE(review): elided listing — some parameters and closing braces of the
// nested helpers are not visible here.
4197 pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
4199 vs: &'tcx [hir::Variant],
// Returns true when `disr` fits in the integer type named by the repr hint.
4202 fn disr_in_range(ccx: &CrateCtxt,
4204 disr: ty::Disr) -> bool {
// Round-trip cast test: the value fits iff casting to the narrower
// unsigned type and back preserves it.
4205 fn uint_in_range(ccx: &CrateCtxt, ty: ast::UintTy, disr: ty::Disr) -> bool {
4207 ast::TyU8 => disr as u8 as Disr == disr,
4208 ast::TyU16 => disr as u16 as Disr == disr,
4209 ast::TyU32 => disr as u32 as Disr == disr,
4210 ast::TyU64 => disr as u64 as Disr == disr,
// `usize` defers to the target's actual pointer-width integer type.
4211 ast::TyUs => uint_in_range(ccx, ccx.tcx.sess.target.uint_type, disr)
4214 fn int_in_range(ccx: &CrateCtxt, ty: ast::IntTy, disr: ty::Disr) -> bool {
4216 ast::TyI8 => disr as i8 as Disr == disr,
4217 ast::TyI16 => disr as i16 as Disr == disr,
4218 ast::TyI32 => disr as i32 as Disr == disr,
4219 ast::TyI64 => disr as i64 as Disr == disr,
4220 ast::TyIs => int_in_range(ccx, ccx.tcx.sess.target.int_type, disr)
4224 attr::UnsignedInt(ty) => uint_in_range(ccx, ty, disr),
4225 attr::SignedInt(ty) => int_in_range(ccx, ty, disr)
// Per-variant checks: discriminant expressions, duplicates, and range.
4229 fn do_check<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
4230 vs: &'tcx [hir::Variant],
4232 hint: attr::ReprAttr) {
4233 #![allow(trivial_numeric_casts)]
4235 let rty = ccx.tcx.node_id_to_type(id);
4236 let mut disr_vals: Vec<ty::Disr> = Vec::new();
// Explicit discriminant expressions are constants; check them in a
// fresh static context against the enum's repr integer type.
4238 let tables = RefCell::new(ty::Tables::empty());
4239 let inh = static_inherited_fields(ccx, &tables);
4240 let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(rty), id);
4242 let (_, repr_type_ty) = ccx.tcx.enum_repr_type(Some(&hint));
4244 if let Some(ref e) = v.node.disr_expr {
4245 check_const_with_ty(&fcx, e.span, e, repr_type_ty);
4249 let def_id = ccx.tcx.map.local_def_id(id);
4251 let variants = &ccx.tcx.lookup_adt_def(def_id).variants;
4252 for (v, variant) in vs.iter().zip(variants.iter()) {
4253 let current_disr_val = variant.disr_val;
4255 // Check for duplicate discriminant values
4256 match disr_vals.iter().position(|&x| x == current_disr_val) {
4258 span_err!(ccx.tcx.sess, v.span, E0081,
4259 "discriminant value `{}` already exists", disr_vals[i]);
4260 let variant_i_node_id = ccx.tcx.map.as_local_node_id(variants[i].did).unwrap();
4261 span_note!(ccx.tcx.sess, ccx.tcx.map.span(variant_i_node_id),
4262 "conflicting discriminant here")
4266 // Check for unrepresentable discriminant values
4268 attr::ReprAny | attr::ReprExtern => (),
4269 attr::ReprInt(sp, ity) => {
4270 if !disr_in_range(ccx, ity, current_disr_val) {
4271 span_err!(ccx.tcx.sess, v.span, E0082,
4272 "discriminant value outside specified type");
4273 span_note!(ccx.tcx.sess, sp,
4274 "discriminant type specified here");
// ReprSimd/ReprPacked on an enum should have been rejected earlier;
// reaching here is a compiler bug.
4278 ccx.tcx.sess.bug("range_to_inttype: found ReprSimd on an enum");
4280 attr::ReprPacked => {
4281 ccx.tcx.sess.bug("range_to_inttype: found ReprPacked on an enum");
4284 disr_vals.push(current_disr_val);
// Body of check_enum_variants proper: reject non-default reprs on
// enums with fewer than two variants, then run the per-variant checks.
4288 let def_id = ccx.tcx.map.local_def_id(id);
4289 let hint = *ccx.tcx.lookup_repr_hints(def_id).get(0).unwrap_or(&attr::ReprAny);
4291 if hint != attr::ReprAny && vs.len() <= 1 {
4293 span_err!(ccx.tcx.sess, sp, E0083,
4294 "unsupported representation for univariant enum");
4296 span_err!(ccx.tcx.sess, sp, E0084,
4297 "unsupported representation for zero-variant enum");
4301 do_check(ccx, vs, id, hint);
4303 check_representable(ccx.tcx, sp, id, "enum");
4306 // Returns the type parameter count and the type for the given definition.
// Locals and upvars have no generics or predicates of their own; item-like
// defs (fns, methods, statics, variants, structs, consts) are looked up in
// the item-type and predicate tables. Anything else is not a value and is
// reported as a compiler bug.
4307 fn type_scheme_and_predicates_for_def<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4310 -> (TypeScheme<'tcx>, GenericPredicates<'tcx>) {
4312 def::DefLocal(_, nid) | def::DefUpvar(_, nid, _, _) => {
4313 let typ = fcx.local_ty(sp, nid);
4314 (ty::TypeScheme { generics: ty::Generics::empty(), ty: typ },
4315 ty::GenericPredicates::empty())
4317 def::DefFn(id, _) | def::DefMethod(id) |
4318 def::DefStatic(id, _) | def::DefVariant(_, id, _) |
4319 def::DefStruct(id) | def::DefConst(id) | def::DefAssociatedConst(id) => {
4320 (fcx.tcx().lookup_item_type(id), fcx.tcx().lookup_predicates(id))
// Non-value definitions: reaching here means resolve produced something
// that cannot appear in expression position.
4324 def::DefAssociatedTy(..) |
4326 def::DefTyParam(..) |
4328 def::DefForeignMod(..) |
4331 def::DefSelfTy(..) |
4333 fcx.ccx.tcx.sess.span_bug(sp, &format!("expected value, found {:?}", defn));
4338 // Instantiates the given path, which must refer to an item with the given
4339 // number of type parameters and type.
// Builds the full substitution for the referenced item from the explicit
// parameters in the path segments (plus inference variables / defaults for
// anything omitted), registers the item's bounds as obligations, and writes
// the substituted type and substs for `node_id`.
// NOTE(review): elided listing — some match arms and assertions are not
// visible here.
4340 pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4341 segments: &[hir::PathSegment],
4342 type_scheme: TypeScheme<'tcx>,
4343 type_predicates: &ty::GenericPredicates<'tcx>,
4344 opt_self_ty: Option<Ty<'tcx>>,
4347 node_id: ast::NodeId) {
4348 debug!("instantiate_path(path={:?}, def={:?}, node_id={}, type_scheme={:?})",
4354 // We need to extract the type parameters supplied by the user in
4355 // the path `path`. Due to the current setup, this is a bit of a
4356 // tricky-process; the problem is that resolve only tells us the
4357 // end-point of the path resolution, and not the intermediate steps.
4358 // Luckily, we can (at least for now) deduce the intermediate steps
4359 // just from the end-point.
4361 // There are basically four cases to consider:
4363 // 1. Reference to a *type*, such as a struct or enum:
4365 //       mod a { struct Foo<T> { ... } }
4367 //    Because we don't allow types to be declared within one
4368 //    another, a path that leads to a type will always look like
4369 //    `a::b::Foo<T>` where `a` and `b` are modules. This implies
4370 //    that only the final segment can have type parameters, and
4371 //    they are located in the TypeSpace.
4373 //    *Note:* Generally speaking, references to types don't
4374 //    actually pass through this function, but rather the
4375 //    `ast_ty_to_ty` function in `astconv`. However, in the case
4376 //    of struct patterns (and maybe literals) we do invoke
4377 //    `instantiate_path` to get the general type of an instance of
4378 //    a struct. (In these cases, there are actually no type
4379 //    parameters permitted at present, but perhaps we will allow
4380 //    them in the future.)
4382 // 1b. Reference to an enum variant or tuple-like struct:
4384 //       struct foo<T>(...)
4385 //       enum E<T> { foo(...) }
4387 //     In these cases, the parameters are declared in the type
4390 // 2. Reference to a *fn item*:
4394 //    In this case, the path will again always have the form
4395 //    `a::b::foo::<T>` where only the final segment should have
4396 //    type parameters. However, in this case, those parameters are
4397 //    declared on a value, and hence are in the `FnSpace`.
4399 // 3. Reference to a *method*:
4401 //       impl<A> SomeStruct<A> {
4405 //    Here we can have a path like
4406 //    `a::b::SomeStruct::<A>::foo::<B>`, in which case parameters
4407 //    may appear in two places. The penultimate segment,
4408 //    `SomeStruct::<A>`, contains parameters in TypeSpace, and the
4409 //    final segment, `foo::<B>` contains parameters in fn space.
4411 // 4. Reference to an *associated const*:
4413 // impl<A> AnotherStruct<A> {
4414 // const FOO: B = BAR;
4417 //    The path in this case will look like
4418 //    `a::b::AnotherStruct::<A>::FOO`, so the penultimate segment
4419 //    only will have parameters in TypeSpace.
4421 // The first step then is to categorize the segments appropriately.
4423 assert!(!segments.is_empty());
4425 let mut ufcs_associated = None;
// `segment_spaces[i]` records which parameter space (if any) segment `i`
// may legally supply parameters for; `None` means "no parameters allowed".
4426 let mut segment_spaces: Vec<_>;
4428 // Case 1 and 1b. Reference to a *type* or *enum variant*.
4429 def::DefSelfTy(..) |
4430 def::DefStruct(..) |
4431 def::DefVariant(..) |
4433 def::DefAssociatedTy(..) |
4435 def::DefPrimTy(..) |
4436 def::DefTyParam(..) => {
4437 // Everything but the final segment should have no
4438 // parameters at all.
4439 segment_spaces = vec![None; segments.len() - 1];
4440 segment_spaces.push(Some(subst::TypeSpace));
4443 // Case 2. Reference to a top-level value.
4446 def::DefStatic(..) => {
4447 segment_spaces = vec![None; segments.len() - 1];
4448 segment_spaces.push(Some(subst::FnSpace));
4451 // Case 3. Reference to a method.
4452 def::DefMethod(def_id) => {
4453 let container = fcx.tcx().impl_or_trait_item(def_id).container();
4455 ty::TraitContainer(trait_did) => {
4456 callee::check_legal_trait_for_method_call(fcx.ccx, span, trait_did)
4458 ty::ImplContainer(_) => {}
4461 if segments.len() >= 2 {
4462 segment_spaces = vec![None; segments.len() - 2];
4463 segment_spaces.push(Some(subst::TypeSpace));
4464 segment_spaces.push(Some(subst::FnSpace));
4466 // `<T>::method` will end up here, and so can `T::method`.
4467 let self_ty = opt_self_ty.expect("UFCS sugared method missing Self");
4468 segment_spaces = vec![Some(subst::FnSpace)];
4469 ufcs_associated = Some((container, self_ty));
// Case 4. Reference to an associated const (no FnSpace parameters).
4473 def::DefAssociatedConst(def_id) => {
4474 let container = fcx.tcx().impl_or_trait_item(def_id).container();
4476 ty::TraitContainer(trait_did) => {
4477 callee::check_legal_trait_for_method_call(fcx.ccx, span, trait_did)
4479 ty::ImplContainer(_) => {}
4482 if segments.len() >= 2 {
4483 segment_spaces = vec![None; segments.len() - 2];
4484 segment_spaces.push(Some(subst::TypeSpace));
4485 segment_spaces.push(None);
4487 // `<T>::CONST` will end up here, and so can `T::CONST`.
4488 let self_ty = opt_self_ty.expect("UFCS sugared const missing Self");
4489 segment_spaces = vec![None];
4490 ufcs_associated = Some((container, self_ty));
4494 // Other cases. Various nonsense that really shouldn't show up
4495 // here. If they do, an error will have been reported
4496 // elsewhere. (I hope)
4498 def::DefForeignMod(..) |
4504 segment_spaces = vec![None; segments.len()];
4507 assert_eq!(segment_spaces.len(), segments.len());
4509 // In `<T as Trait<A, B>>::method`, `A` and `B` are mandatory, but
4510 // `opt_self_ty` can also be Some for `Foo::method`, where Foo's
4511 // type parameters are not mandatory.
4512 let require_type_space = opt_self_ty.is_some() && ufcs_associated.is_none();
4514 debug!("segment_spaces={:?}", segment_spaces);
4516 // Next, examine the definition, and determine how many type
4517 // parameters we expect from each space.
4518 let type_defs = &type_scheme.generics.types;
4519 let region_defs = &type_scheme.generics.regions;
4521 // Now that we have categorized what space the parameters for each
4522 // segment belong to, let's sort out the parameters that the user
4523 // provided (if any) into their appropriate spaces. We'll also report
4524 // errors if type parameters are provided in an inappropriate place.
4525 let mut substs = Substs::empty();
4526 for (opt_space, segment) in segment_spaces.iter().zip(segments) {
// Segments with no legal space must not carry explicit parameters.
4529 prohibit_type_params(fcx.tcx(), slice::ref_slice(segment));
4533 push_explicit_parameters_from_segment_to_substs(fcx,
// A provided self type fills the SelfSpace slot when the item has one.
4543 if let Some(self_ty) = opt_self_ty {
4544 if type_defs.len(subst::SelfSpace) == 1 {
4545 substs.types.push(subst::SelfSpace, self_ty);
4549 // Now we have to compare the types that the user *actually*
4550 // provided against the types that were *expected*. If the user
4551 // did not provide any types, then we want to substitute inference
4552 // variables. If the user provided some types, we may still need
4553 // to add defaults. If the user provided *too many* types, that's
4555 for &space in &[subst::SelfSpace, subst::TypeSpace, subst::FnSpace] {
4556 adjust_type_parameters(fcx, span, space, type_defs,
4557 require_type_space, &mut substs);
4558 assert_eq!(substs.types.len(space), type_defs.len(space));
4560 adjust_region_parameters(fcx, span, space, region_defs, &mut substs);
4561 assert_eq!(substs.regions().len(space), region_defs.len(space));
4564 // The things we are substituting into the type should not contain
4565 // escaping late-bound regions, and nor should the base type scheme.
4566 assert!(!substs.has_regions_escaping_depth(0));
4567 assert!(!type_scheme.has_escaping_regions());
4569 // Add all the obligations that are required, substituting and
4570 // normalized appropriately.
4571 let bounds = fcx.instantiate_bounds(span, &substs, &type_predicates);
4572 fcx.add_obligations_for_parameters(
4573 traits::ObligationCause::new(span, fcx.body_id, traits::ItemObligation(def.def_id())),
4576 // Substitute the values for the type parameters into the type of
4577 // the referenced item.
4578 let ty_substituted = fcx.instantiate_type_scheme(span, &substs, &type_scheme.ty);
4581 if let Some((ty::ImplContainer(impl_def_id), self_ty)) = ufcs_associated {
4582 // In the case of `Foo<T>::method` and `<Foo<T>>::method`, if `method`
4583 // is inherent, there is no `Self` parameter, instead, the impl needs
4584 // type parameters, which we can infer by unifying the provided `Self`
4585 // with the substituted impl type.
4586 let impl_scheme = fcx.tcx().lookup_item_type(impl_def_id);
4587 assert_eq!(substs.types.len(subst::TypeSpace),
4588 impl_scheme.generics.types.len(subst::TypeSpace));
4589 assert_eq!(substs.regions().len(subst::TypeSpace),
4590 impl_scheme.generics.regions.len(subst::TypeSpace));
// The unification below must succeed: method probing already proved
// self_ty <: impl_ty, so a failure here is a compiler bug.
4592 let impl_ty = fcx.instantiate_type_scheme(span, &substs, &impl_scheme.ty);
4593 if fcx.mk_subty(false, TypeOrigin::Misc(span), self_ty, impl_ty).is_err() {
4594 fcx.tcx().sess.span_bug(span,
4596 "instantiate_path: (UFCS) {:?} was a subtype of {:?} but now is not?",
4602 debug!("instantiate_path: type of {:?} is {:?}",
// Record the final substituted type and the item substitutions for the
// path expression node.
4605 fcx.write_ty(node_id, ty_substituted);
4606 fcx.write_substs(node_id, ty::ItemSubsts { substs: substs });
4609 /// Finds the parameters that the user provided and adds them to `substs`. If too many
4610 /// parameters are provided, then reports an error and clears the output vector.
4612 /// We clear the output vector because that will cause the `adjust_XXX_parameters()` later to
4613 /// use inference variables. This seems less likely to lead to derived errors.
4615 /// Note that we *do not* check for *too few* parameters here. Due to the presence of defaults
4616 /// etc that is more complicated. I wanted however to do the reporting of *too many* parameters
4617 /// here because we can easily use the precise span of the N+1'th parameter.
4618 fn push_explicit_parameters_from_segment_to_substs<'a, 'tcx>(
4619 fcx: &FnCtxt<'a, 'tcx>,
4622 type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4623 region_defs: &VecPerParamSpace<ty::RegionParameterDef>,
4624 segment: &hir::PathSegment,
4625 substs: &mut Substs<'tcx>)
// Dispatch on the syntactic form of the segment's parameter list.
4627 match segment.parameters {
// Normal `Foo<'a, T, U>` form: lifetimes and types in angle brackets.
4628 hir::AngleBracketedParameters(ref data) => {
4629 push_explicit_angle_bracketed_parameters_from_segment_to_substs(
4630 fcx, space, type_defs, region_defs, data, substs);
// `Foo(A, B) -> C` sugar form. In an expression path this is only
// legal on trait paths, so report E0238 -- but still push the
// supplied parameters so later phases don't cascade errors.
4633 hir::ParenthesizedParameters(ref data) => {
4634 span_err!(fcx.tcx().sess, span, E0238,
4635 "parenthesized parameters may only be used with a trait");
4636 push_explicit_parenthesized_parameters_from_segment_to_substs(
4637 fcx, space, span, type_defs, data, substs);
/// Pushes the explicitly supplied `<...>` type and lifetime arguments of one
/// path segment into `substs` for the given `space`. On a *too many* error
/// (E0087 for types, E0088 for lifetimes) the offending vector is truncated
/// to empty so that `adjust_XXX_parameters()` later falls back to inference
/// variables rather than propagating a bad partial substitution.
4642 fn push_explicit_angle_bracketed_parameters_from_segment_to_substs<'a, 'tcx>(
4643 fcx: &FnCtxt<'a, 'tcx>,
4644 space: subst::ParamSpace,
4645 type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4646 region_defs: &VecPerParamSpace<ty::RegionParameterDef>,
4647 data: &hir::AngleBracketedParameterData,
4648 substs: &mut Substs<'tcx>)
// --- Type parameters ---
// `type_count` is how many type parameters the definition declares in
// this space; `substs` must not already contain any for this space.
4651 let type_count = type_defs.len(space);
4652 assert_eq!(substs.types.len(space), 0);
4653 for (i, typ) in data.types.iter().enumerate() {
// Convert the HIR type to a `Ty` and keep it while within bounds.
4654 let t = fcx.to_ty(&**typ);
4656 substs.types.push(space, t);
// Report E0087 only once, at the first excess parameter (i == count),
// using that parameter's precise span.
4657 } else if i == type_count {
4658 span_err!(fcx.tcx().sess, typ.span, E0087,
4659 "too many type parameters provided: \
4660 expected at most {} parameter{}, \
4661 found {} parameter{}",
4663 if type_count == 1 {""} else {"s"},
4665 if data.types.len() == 1 {""} else {"s"});
// Clear what we pushed so the caller substitutes inference vars.
4666 substs.types.truncate(space, 0);
// Associated-type bindings (`Item = T`) are not allowed in expression
// paths at all; point E0182 at the first binding.
4672 if !data.bindings.is_empty() {
4673 span_err!(fcx.tcx().sess, data.bindings[0].span, E0182,
4674 "unexpected binding of associated item in expression path \
4675 (only allowed in type paths)");
// --- Lifetime parameters --- (same shape as the type loop above)
4679 let region_count = region_defs.len(space);
4680 assert_eq!(substs.regions().len(space), 0);
4681 for (i, lifetime) in data.lifetimes.iter().enumerate() {
4682 let r = ast_region_to_region(fcx.tcx(), lifetime);
4683 if i < region_count {
4684 substs.mut_regions().push(space, r);
// E0088: too many lifetimes; reported once at the first excess one.
4685 } else if i == region_count {
4686 span_err!(fcx.tcx().sess, lifetime.span, E0088,
4687 "too many lifetime parameters provided: \
4688 expected {} parameter{}, found {} parameter{}",
4690 if region_count == 1 {""} else {"s"},
4691 data.lifetimes.len(),
4692 if data.lifetimes.len() == 1 {""} else {"s"});
// As with types: clear so inference variables are used instead.
4693 substs.mut_regions().truncate(space, 0);
4701 /// `push_explicit_angle_bracketed_parameters_from_segment_to_substs`,
4702 /// but intended for `Foo(A,B) -> C` form. This expands to
4703 /// roughly the same thing as `Foo<(A,B),C>`. One important
4704 /// difference has to do with the treatment of anonymous
4705 /// regions, which are translated into bound regions (NYI).
4706 fn push_explicit_parenthesized_parameters_from_segment_to_substs<'a, 'tcx>(
4707 fcx: &FnCtxt<'a, 'tcx>,
4708 space: subst::ParamSpace,
4710 type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4711 data: &hir::ParenthesizedParameterData,
4712 substs: &mut Substs<'tcx>)
4714 let type_count = type_defs.len(space);
// The sugar always encodes exactly two type parameters (input tuple +
// output); E0167 if the definition expects fewer than that.
4716 span_err!(fcx.tcx().sess, span, E0167,
4717 "parenthesized form always supplies 2 type parameters, \
4718 but only {} parameter(s) were expected",
// First parameter: the inputs `(A, B)` packed into a single tuple type.
4722 let input_tys: Vec<Ty> =
4723 data.inputs.iter().map(|ty| fcx.to_ty(&**ty)).collect();
4725 let tuple_ty = fcx.tcx().mk_tup(input_tys);
// Guarded pushes: only push as many parameters as the definition has
// room for (errors were already reported above).
4727 if type_count >= 1 {
4728 substs.types.push(space, tuple_ty);
// Second parameter: the output `C`; an omitted `-> C` defaults to `()`.
4731 let output_ty: Option<Ty> =
4732 data.output.as_ref().map(|ty| fcx.to_ty(&**ty));
4735 output_ty.unwrap_or(fcx.tcx().mk_nil());
4737 if type_count >= 2 {
4738 substs.types.push(space, output_ty);
/// Reconciles the user-provided type parameters in `substs` (for one param
/// space) against the definition's parameter list `defs`: supplies inference
/// variables when nothing was given, reports E0089 and substitutes error
/// types when too few were given, and otherwise fills in trailing defaults.
/// Postcondition: `substs.types.len(space) == defs.len(space)`.
4742 fn adjust_type_parameters<'a, 'tcx>(
4743 fcx: &FnCtxt<'a, 'tcx>,
4746 defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4747 require_type_space: bool,
4748 substs: &mut Substs<'tcx>)
4750 let provided_len = substs.types.len(space);
4751 let desired = defs.get_slice(space);
// Required = the leading run of parameters without defaults (defaults
// must trail, so `take_while` counts exactly the mandatory ones).
4752 let required_len = desired.iter()
4753 .take_while(|d| d.default.is_none())
4756 debug!("adjust_type_parameters(space={:?}, \
4765 // Enforced by `push_explicit_parameters_from_segment_to_substs()`.
4766 assert!(provided_len <= desired.len());
4768 // Nothing specified at all: supply inference variables for
// (unless the caller demands explicit parameters in the TypeSpace).
4770 if provided_len == 0 && !(require_type_space && space == subst::TypeSpace) {
4771 substs.types.replace(space, Vec::new());
4772 fcx.infcx().type_vars_for_defs(span, space, substs, &desired[..]);
4776 // Too few parameters specified: report an error and use Err
4778 if provided_len < required_len {
// "at least" qualifier only when some parameters have defaults.
4780 if desired.len() != required_len { "at least " } else { "" };
4781 span_err!(fcx.tcx().sess, span, E0089,
4782 "too few type parameters provided: expected {}{} parameter{}, \
4783 found {} parameter{}",
4784 qualifier, required_len,
4785 if required_len == 1 {""} else {"s"},
4787 if provided_len == 1 {""} else {"s"});
// Fill the whole space with the error type to suppress follow-ons.
4788 substs.types.replace(space, vec![fcx.tcx().types.err; desired.len()]);
4792 // Otherwise, add in any optional parameters that the user
4793 // omitted. The case of *too many* parameters is handled
4795 // push_explicit_parameters_from_segment_to_substs(). Note
4796 // that the *default* type are expressed in terms of all prior
4797 // parameters, so we have to substitute as we go with the
4798 // partial substitution that we have built up.
4799 for i in provided_len..desired.len() {
// `unwrap` is safe here: i >= required_len, so this def has a default.
4800 let default = desired[i].default.unwrap();
4801 let default = default.subst_spanned(fcx.tcx(), substs, Some(span));
4802 substs.types.push(space, default);
4804 assert_eq!(substs.types.len(space), desired.len());
4806 debug!("Final substs: {:?}", substs);
/// Region analogue of `adjust_type_parameters`: ensures `substs` carries
/// exactly `defs.len(space)` region parameters. Lifetimes have no defaults,
/// so the cases are: none provided (infer all), exactly right (keep), or
/// too few (report E0090, then infer all).
4809 fn adjust_region_parameters(
4813 defs: &VecPerParamSpace<ty::RegionParameterDef>,
4814 substs: &mut Substs)
4816 let provided_len = substs.mut_regions().len(space);
4817 let desired = defs.get_slice(space);
4819 // Enforced by `push_explicit_parameters_from_segment_to_substs()`.
4820 assert!(provided_len <= desired.len());
4822 // If nothing was provided, just use inference variables.
4823 if provided_len == 0 {
4824 substs.mut_regions().replace(
4826 fcx.infcx().region_vars_for_defs(span, desired));
4830 // If just the right number were provided, everybody is happy.
4831 if provided_len == desired.len() {
4835 // Otherwise, too few were provided. Report an error and then
4836 // use inference variables.
4837 span_err!(fcx.tcx().sess, span, E0090,
4838 "too few lifetime parameters provided: expected {} parameter{}, \
4839 found {} parameter{}",
4841 if desired.len() == 1 {""} else {"s"},
4843 if provided_len == 1 {""} else {"s"});
// Discard the partial user-supplied list and infer every region fresh.
4845 substs.mut_regions().replace(
4847 fcx.infcx().region_vars_for_defs(span, desired));
/// Resolves `ty` as far as current inference allows; if it is still an
/// unresolved type variable, asks the fallback closure `f` for an
/// alternative. If even the alternative is a type variable (or an error
/// type), reports "type must be known in this context" and degrades `ty`
/// to the error type; otherwise unifies `ty` with the alternative.
4851 fn structurally_resolve_type_or_else<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
4855 where F: Fn() -> Ty<'tcx>
4857 let mut ty = fcx.resolve_type_vars_if_possible(ty);
// Only reached when `ty` is still unresolved: consult the fallback.
4860 let alternative = f();
// Fallback didn't help either -- report and poison with the error type
// (the suptype call records the relationship for diagnostics).
4863 if alternative.is_ty_var() || alternative.references_error() {
4864 fcx.type_error_message(sp, |_actual| {
4865 "the type of this value must be known in this context".to_string()
4867 demand::suptype(fcx, sp, fcx.tcx().types.err, ty);
4868 ty = fcx.tcx().types.err;
// Fallback produced a concrete type: force `ty` to agree with it.
4870 demand::suptype(fcx, sp, alternative, ty);
4878 // Resolves `typ` by a single level if `typ` is a type variable. If no
4879 // resolution is possible, then an error is reported.
// Thin wrapper over `structurally_resolve_type_or_else` whose fallback
// closure (body truncated here) handles the cannot-resolve case.
4880 pub fn structurally_resolved_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4885 structurally_resolve_type_or_else(fcx, sp, ty, || {
4890 // Returns true if b contains a break that can exit from b
4891 pub fn may_break(cx: &ty::ctxt, id: ast::NodeId, b: &hir::Block) -> bool {
4892 // First: is there an unlabeled break immediately
// `loop_query` scans only the loop body itself, so an unlabeled break
// here necessarily exits this block.
4894 (loop_query(&*b, |e| {
4896 hir::ExprBreak(None) => true,
4900 // Second: is there a labeled break with label
4901 // <id> nested anywhere inside the loop?
// `block_query` walks the whole block; a labeled break counts only if
// its label resolves (via def lookup) to this block's node id.
4902 (block_query(b, |e| {
4903 if let hir::ExprBreak(Some(_)) = e.node {
4904 lookup_full_def(cx, e.span, e.id) == def::DefLabel(id)
4911 pub fn check_bounds_are_used<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
4912 tps: &OwnedSlice<hir::TyParam>,
4914 debug!("check_bounds_are_used(n_tps={}, ty={:?})",
4917 // make a vector of booleans initially false, set to true when used
4918 if tps.is_empty() { return; }
4919 let mut tps_used = vec![false; tps.len()];
4921 for leaf_ty in ty.walk() {
4922 if let ty::TyParam(ParamTy {idx, ..}) = leaf_ty.sty {
4923 debug!("Found use of ty param num {}", idx);
4924 tps_used[idx as usize] = true;
4928 for (i, b) in tps_used.iter().enumerate() {
4930 span_err!(ccx.tcx.sess, tps[i].span, E0091,
4931 "type parameter `{}` is unused",