1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
15 Within the check phase of type check, we check each item one at a time
16 (bodies of function expressions are checked as part of the containing
17 function). Inference is used to supply types wherever they are unknown.
20 By far the most complex case is checking the body of a function. This
21 can be broken down into several distinct phases:
23 - gather: creates type variables to represent the type of each local
24 variable and pattern binding.
26 - main: the main pass does the lion's share of the work: it
27 determines the types of all expressions, resolves
28 methods, checks for most invalid conditions, and so forth. In
29 some cases, where a type is unknown, it may create a type or region
30 variable and use that as the type of an expression.
32 In the process of checking, various constraints will be placed on
33 these type variables through the subtyping relationships requested
34 through the `demand` module. The `infer` module is in charge
35 of resolving those constraints.
37 - regionck: after main is complete, the regionck pass goes over all
38 types looking for regions and making sure that they did not escape
39 into places they are not in scope. This may also influence the
40 final assignments of the various region variables if there is some flexibility.
43 - vtable: find and records the impls to use for each trait bound that
44 appears on a type parameter.
46 - writeback: writes the final types within a function body, replacing
47 type variables with their final inferred types. These final types
48 are written into the `tcx.node_types` table, which should *never* contain
49 any reference to a type variable.
53 While type checking a function, the intermediate types for the
54 expressions, blocks, and so forth contained within the function are
55 stored in `fcx.node_types` and `fcx.item_substs`. These types
56 may contain unresolved type variables. After type checking is
57 complete, the functions in the writeback module are used to take the
58 types from this table, resolve them, and then write them into their
59 permanent home in the type context `ccx.tcx`.
61 This means that during inferencing you should use `fcx.write_ty()`
62 and `fcx.expr_ty()` / `fcx.node_ty()` to write/obtain the types of
63 nodes within the function.
65 The types of top-level items, which never contain unbound type
66 variables, are stored directly into the `tcx` tables.
68 n.b.: A type variable is not the same thing as a type parameter. A
69 type variable is rather an "instance" of a type parameter: that is,
70 given a generic function `fn foo<T>(t: T)`: while checking the
71 function `foo`, the type `ty_param(0)` refers to the type `T`, which
72 is treated in abstract. When `foo()` is called, however, `T` will be
73 substituted for a fresh type variable `N`. This variable will
74 eventually be resolved to some concrete type (which might itself be a type variable).
79 pub use self::Expectation::*;
80 pub use self::compare_method::{compare_impl_method, compare_const_impl};
81 use self::TupleArgumentsFlag::*;
83 use astconv::{self, ast_region_to_region, ast_ty_to_ty, AstConv, PathParamMode};
84 use check::_match::pat_ctxt;
85 use fmt_macros::{Parser, Piece, Position};
86 use middle::astconv_util::prohibit_type_params;
87 use middle::cstore::LOCAL_CRATE;
89 use middle::def_id::DefId;
91 use middle::infer::{TypeOrigin, type_variable};
92 use middle::pat_util::{self, pat_id_map};
93 use middle::privacy::{AllPublic, LastMod};
94 use middle::subst::{self, Subst, Substs, VecPerParamSpace, ParamSpace, TypeSpace};
95 use middle::traits::{self, report_fulfillment_errors};
96 use middle::ty::{FnSig, GenericPredicates, TypeScheme};
97 use middle::ty::{Disr, ParamTy, ParameterEnvironment};
98 use middle::ty::{LvaluePreference, NoPreference, PreferMutLvalue};
99 use middle::ty::{self, HasTypeFlags, RegionEscape, ToPolyTraitRef, Ty};
100 use middle::ty::{MethodCall, MethodCallee};
101 use middle::ty::adjustment;
102 use middle::ty::error::TypeError;
103 use middle::ty::fold::{TypeFolder, TypeFoldable};
104 use middle::ty::util::Representability;
105 use require_c_abi_if_variadic;
106 use rscope::{ElisionFailureInfo, RegionScope};
107 use session::Session;
108 use {CrateCtxt, lookup_full_def};
111 use util::common::{block_query, ErrorReported, indenter, loop_query};
112 use util::nodemap::{DefIdMap, FnvHashMap, NodeMap};
114 use std::cell::{Cell, Ref, RefCell};
115 use std::collections::{HashSet};
116 use std::mem::replace;
120 use syntax::attr::AttrMetaMethods;
121 use syntax::codemap::{self, Span, Spanned};
122 use syntax::owned_slice::OwnedSlice;
123 use syntax::parse::token::{self, InternedString};
125 use syntax::util::lev_distance::lev_distance;
127 use rustc_front::intravisit::{self, Visitor};
128 use rustc_front::hir;
129 use rustc_front::hir::Visibility;
130 use rustc_front::hir::{Item, ItemImpl};
131 use rustc_front::print::pprust;
132 use rustc_back::slice;
152 /// closures defined within the function. For example:
155 /// bar(move|| { ... })
158 /// Here, the function `foo()` and the closure passed to
159 /// `bar()` will each have their own `FnCtxt`, but they will
160 /// share the inherited fields.
161 pub struct Inherited<'a, 'tcx: 'a> {
162 infcx: infer::InferCtxt<'a, 'tcx>,
163 locals: RefCell<NodeMap<Ty<'tcx>>>,
165 tables: &'a RefCell<ty::Tables<'tcx>>,
167 // When we process a call like `c()` where `c` is a closure type,
168 // we may not have decided yet whether `c` is a `Fn`, `FnMut`, or
169 // `FnOnce` closure. In that case, we defer full resolution of the
170 // call until upvar inference can kick in and make the
171 // decision. We keep these deferred resolutions grouped by the
172 // def-id of the closure, so that once we decide, we can easily go
173 // back and process them.
174 deferred_call_resolutions: RefCell<DefIdMap<Vec<DeferredCallResolutionHandler<'tcx>>>>,
176 deferred_cast_checks: RefCell<Vec<cast::CastCheck<'tcx>>>,
179 trait DeferredCallResolution<'tcx> {
180 fn resolve<'a>(&mut self, fcx: &FnCtxt<'a,'tcx>);
183 type DeferredCallResolutionHandler<'tcx> = Box<DeferredCallResolution<'tcx>+'tcx>;
185 /// When type-checking an expression, we propagate downward
186 /// whatever type hint we are able in the form of an `Expectation`.
187 #[derive(Copy, Clone, Debug)]
188 pub enum Expectation<'tcx> {
189 /// We know nothing about what type this expression should have.
192 /// This expression should have the type given (or some subtype)
193 ExpectHasType(Ty<'tcx>),
195 /// This expression will be cast to the `Ty`
196 ExpectCastableToType(Ty<'tcx>),
198 /// This rvalue expression will be wrapped in `&` or `Box` and coerced
199 /// to `&Ty` or `Box<Ty>`, respectively. `Ty` is `[A]` or `Trait`.
200 ExpectRvalueLikeUnsized(Ty<'tcx>),
203 impl<'tcx> Expectation<'tcx> {
204 // Disregard "castable to" expectations because they
205 // can lead us astray. Consider for example `if cond
206 // {22} else {c} as u8` -- if we propagate the
207 // "castable to u8" constraint to 22, it will pick the
208 // type 22u8, which is overly constrained (c might not
209 // be a u8). In effect, the problem is that the
210 // "castable to" expectation is not the tightest thing
211 // we can say, so we want to drop it in this case.
212 // The tightest thing we can say is "must unify with
213 // else branch". Note that in the case of a "has type"
214 // constraint, this limitation does not hold.
216 // If the expected type is just a type variable, then don't use
217 // an expected type. Otherwise, we might write parts of the type
218 // when checking the 'then' block which are incompatible with the
220 fn adjust_for_branches<'a>(&self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> {
222 ExpectHasType(ety) => {
223 let ety = fcx.infcx().shallow_resolve(ety);
224 if !ety.is_ty_var() {
230 ExpectRvalueLikeUnsized(ety) => {
231 ExpectRvalueLikeUnsized(ety)
238 #[derive(Copy, Clone)]
239 pub struct UnsafetyState {
240 pub def: ast::NodeId,
241 pub unsafety: hir::Unsafety,
242 pub unsafe_push_count: u32,
247 pub fn function(unsafety: hir::Unsafety, def: ast::NodeId) -> UnsafetyState {
248 UnsafetyState { def: def, unsafety: unsafety, unsafe_push_count: 0, from_fn: true }
251 pub fn recurse(&mut self, blk: &hir::Block) -> UnsafetyState {
252 match self.unsafety {
253 // If this unsafe, then if the outer function was already marked as
254 // unsafe we shouldn't attribute the unsafe'ness to the block. This
255 // way the block can be warned about instead of ignoring this
256 // extraneous block (functions are never warned about).
257 hir::Unsafety::Unsafe if self.from_fn => *self,
260 let (unsafety, def, count) = match blk.rules {
261 hir::PushUnsafeBlock(..) =>
262 (unsafety, blk.id, self.unsafe_push_count.checked_add(1).unwrap()),
263 hir::PopUnsafeBlock(..) =>
264 (unsafety, blk.id, self.unsafe_push_count.checked_sub(1).unwrap()),
265 hir::UnsafeBlock(..) =>
266 (hir::Unsafety::Unsafe, blk.id, self.unsafe_push_count),
267 hir::DefaultBlock | hir::PushUnstableBlock | hir:: PopUnstableBlock =>
268 (unsafety, self.def, self.unsafe_push_count),
270 UnsafetyState{ def: def,
272 unsafe_push_count: count,
280 pub struct FnCtxt<'a, 'tcx: 'a> {
281 body_id: ast::NodeId,
283 // This flag is set to true if, during the writeback phase, we encounter
284 // a type error in this function.
285 writeback_errors: Cell<bool>,
287 // Number of errors that had been reported when we started
288 // checking this function. On exit, if we find that *more* errors
289 // have been reported, we will skip regionck and other work that
290 // expects the types within the function to be consistent.
291 err_count_on_creation: usize,
293 ret_ty: ty::FnOutput<'tcx>,
295 ps: RefCell<UnsafetyState>,
297 inh: &'a Inherited<'a, 'tcx>,
299 ccx: &'a CrateCtxt<'a, 'tcx>,
302 impl<'a, 'tcx> Inherited<'a, 'tcx> {
303 fn new(tcx: &'a ty::ctxt<'tcx>,
304 tables: &'a RefCell<ty::Tables<'tcx>>,
305 param_env: ty::ParameterEnvironment<'a, 'tcx>)
306 -> Inherited<'a, 'tcx> {
309 infcx: infer::new_infer_ctxt(tcx, tables, Some(param_env), true),
310 locals: RefCell::new(NodeMap()),
312 deferred_call_resolutions: RefCell::new(DefIdMap()),
313 deferred_cast_checks: RefCell::new(Vec::new()),
317 fn normalize_associated_types_in<T>(&self,
319 body_id: ast::NodeId,
322 where T : TypeFoldable<'tcx> + HasTypeFlags
324 let mut fulfillment_cx = self.infcx.fulfillment_cx.borrow_mut();
325 assoc::normalize_associated_types_in(&self.infcx,
334 // Used by check_const and check_enum_variants
335 pub fn blank_fn_ctxt<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
336 inh: &'a Inherited<'a, 'tcx>,
337 rty: ty::FnOutput<'tcx>,
338 body_id: ast::NodeId)
339 -> FnCtxt<'a, 'tcx> {
342 writeback_errors: Cell::new(false),
343 err_count_on_creation: ccx.tcx.sess.err_count(),
345 ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal, 0)),
351 fn static_inherited_fields<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
352 tables: &'a RefCell<ty::Tables<'tcx>>)
353 -> Inherited<'a, 'tcx> {
354 // It's kind of a kludge to manufacture a fake function context
355 // and statement context, but we might as well do write the code only once
356 let param_env = ccx.tcx.empty_parameter_environment();
357 Inherited::new(ccx.tcx, &tables, param_env)
360 struct CheckItemTypesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
361 struct CheckItemBodiesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
363 impl<'a, 'tcx> Visitor<'tcx> for CheckItemTypesVisitor<'a, 'tcx> {
364 fn visit_item(&mut self, i: &'tcx hir::Item) {
365 check_item_type(self.ccx, i);
366 intravisit::walk_item(self, i);
369 fn visit_ty(&mut self, t: &'tcx hir::Ty) {
371 hir::TyFixedLengthVec(_, ref expr) => {
372 check_const_in_type(self.ccx, &**expr, self.ccx.tcx.types.usize);
377 intravisit::walk_ty(self, t);
381 impl<'a, 'tcx> Visitor<'tcx> for CheckItemBodiesVisitor<'a, 'tcx> {
382 fn visit_item(&mut self, i: &'tcx hir::Item) {
383 check_item_body(self.ccx, i);
387 pub fn check_wf_old(ccx: &CrateCtxt) {
388 // FIXME(#25759). The new code below is much more reliable but (for now)
389 // only generates warnings. So as to ensure that we continue
390 // getting errors where we used to get errors, we run the old wf
391 // code first and abort if it encounters any errors. If no abort
392 // comes, we run the new code and issue warnings.
393 let krate = ccx.tcx.map.krate();
394 let mut visit = wf::CheckTypeWellFormedVisitor::new(ccx);
395 krate.visit_all_items(&mut visit);
397 // If types are not well-formed, it leads to all manner of errors
398 // downstream, so stop reporting errors at this point.
399 ccx.tcx.sess.abort_if_errors();
402 pub fn check_wf_new(ccx: &CrateCtxt) {
403 let krate = ccx.tcx.map.krate();
404 let mut visit = wfcheck::CheckTypeWellFormedVisitor::new(ccx);
405 krate.visit_all_items(&mut visit);
407 // If types are not well-formed, it leads to all manner of errors
408 // downstream, so stop reporting errors at this point.
409 ccx.tcx.sess.abort_if_errors();
412 pub fn check_item_types(ccx: &CrateCtxt) {
413 let krate = ccx.tcx.map.krate();
414 let mut visit = CheckItemTypesVisitor { ccx: ccx };
415 krate.visit_all_items(&mut visit);
416 ccx.tcx.sess.abort_if_errors();
419 pub fn check_item_bodies(ccx: &CrateCtxt) {
420 let krate = ccx.tcx.map.krate();
421 let mut visit = CheckItemBodiesVisitor { ccx: ccx };
422 krate.visit_all_items(&mut visit);
424 ccx.tcx.sess.abort_if_errors();
427 pub fn check_drop_impls(ccx: &CrateCtxt) {
428 let drop_trait = match ccx.tcx.lang_items.drop_trait() {
429 Some(id) => ccx.tcx.lookup_trait_def(id), None => { return }
431 drop_trait.for_each_impl(ccx.tcx, |drop_impl_did| {
432 if drop_impl_did.is_local() {
433 match dropck::check_drop_impl(ccx.tcx, drop_impl_did) {
436 assert!(ccx.tcx.sess.has_errors());
442 ccx.tcx.sess.abort_if_errors();
445 fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
446 decl: &'tcx hir::FnDecl,
447 body: &'tcx hir::Block,
451 param_env: ty::ParameterEnvironment<'a, 'tcx>)
454 ty::TyBareFn(_, ref fn_ty) => {
455 let tables = RefCell::new(ty::Tables::empty());
456 let inh = Inherited::new(ccx.tcx, &tables, param_env);
458 // Compute the fty from point of view of inside fn.
459 let fn_scope = ccx.tcx.region_maps.item_extent(body.id);
461 fn_ty.sig.subst(ccx.tcx, &inh.infcx.parameter_environment.free_substs);
463 ccx.tcx.liberate_late_bound_regions(fn_scope, &fn_sig);
465 inh.normalize_associated_types_in(body.span,
469 let fcx = check_fn(ccx, fn_ty.unsafety, fn_id, &fn_sig,
470 decl, fn_id, body, &inh);
472 fcx.select_all_obligations_and_apply_defaults();
473 upvar::closure_analyze_fn(&fcx, fn_id, decl, body);
474 fcx.select_obligations_where_possible();
476 fcx.select_all_obligations_or_error(); // Casts can introduce new obligations.
478 regionck::regionck_fn(&fcx, fn_id, fn_span, decl, body);
479 writeback::resolve_type_vars_in_fn(&fcx, decl, body);
481 _ => ccx.tcx.sess.impossible_case(body.span,
482 "check_bare_fn: function type expected")
486 struct GatherLocalsVisitor<'a, 'tcx: 'a> {
487 fcx: &'a FnCtxt<'a, 'tcx>
490 impl<'a, 'tcx> GatherLocalsVisitor<'a, 'tcx> {
491 fn assign(&mut self, _span: Span, nid: ast::NodeId, ty_opt: Option<Ty<'tcx>>) -> Ty<'tcx> {
494 // infer the variable's type
495 let var_ty = self.fcx.infcx().next_ty_var();
496 self.fcx.inh.locals.borrow_mut().insert(nid, var_ty);
500 // take type that the user specified
501 self.fcx.inh.locals.borrow_mut().insert(nid, typ);
508 impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> {
509 // Add explicitly-declared locals.
510 fn visit_local(&mut self, local: &'tcx hir::Local) {
511 let o_ty = match local.ty {
512 Some(ref ty) => Some(self.fcx.to_ty(&**ty)),
515 self.assign(local.span, local.id, o_ty);
516 debug!("Local variable {:?} is assigned type {}",
518 self.fcx.infcx().ty_to_string(
519 self.fcx.inh.locals.borrow().get(&local.id).unwrap().clone()));
520 intravisit::walk_local(self, local);
523 // Add pattern bindings.
524 fn visit_pat(&mut self, p: &'tcx hir::Pat) {
525 if let hir::PatIdent(_, ref path1, _) = p.node {
526 if pat_util::pat_is_binding(&self.fcx.ccx.tcx.def_map.borrow(), p) {
527 let var_ty = self.assign(p.span, p.id, None);
529 self.fcx.require_type_is_sized(var_ty, p.span,
530 traits::VariableType(p.id));
532 debug!("Pattern binding {} is assigned to {} with type {:?}",
534 self.fcx.infcx().ty_to_string(
535 self.fcx.inh.locals.borrow().get(&p.id).unwrap().clone()),
539 intravisit::walk_pat(self, p);
542 fn visit_block(&mut self, b: &'tcx hir::Block) {
543 // non-obvious: the `blk` variable maps to region lb, so
544 // we have to keep this up-to-date. This
545 // is... unfortunate. It'd be nice to not need this.
546 intravisit::walk_block(self, b);
549 // Since an expr occurs as part of the type fixed size arrays we
550 // need to record the type for that node
551 fn visit_ty(&mut self, t: &'tcx hir::Ty) {
553 hir::TyFixedLengthVec(ref ty, ref count_expr) => {
554 self.visit_ty(&**ty);
555 check_expr_with_hint(self.fcx, &**count_expr, self.fcx.tcx().types.usize);
557 hir::TyBareFn(ref function_declaration) => {
558 intravisit::walk_fn_decl_nopat(self, &function_declaration.decl);
559 walk_list!(self, visit_lifetime_def, &function_declaration.lifetimes);
561 _ => intravisit::walk_ty(self, t)
565 // Don't descend into the bodies of nested closures
566 fn visit_fn(&mut self, _: intravisit::FnKind<'tcx>, _: &'tcx hir::FnDecl,
567 _: &'tcx hir::Block, _: Span, _: ast::NodeId) { }
570 /// Helper used by check_bare_fn and check_expr_fn. Does the grungy work of checking a function
571 /// body and returns the function context used for that purpose, since in the case of a fn item
572 /// there is still a bit more to do.
575 /// * inherited: other fields inherited from the enclosing fn (if any)
576 fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
577 unsafety: hir::Unsafety,
578 unsafety_id: ast::NodeId,
579 fn_sig: &ty::FnSig<'tcx>,
580 decl: &'tcx hir::FnDecl,
582 body: &'tcx hir::Block,
583 inherited: &'a Inherited<'a, 'tcx>)
587 let err_count_on_creation = tcx.sess.err_count();
589 let arg_tys = &fn_sig.inputs;
590 let ret_ty = fn_sig.output;
592 debug!("check_fn(arg_tys={:?}, ret_ty={:?}, fn_id={})",
597 // Create the function context. This is either derived from scratch or,
598 // in the case of function expressions, based on the outer context.
601 writeback_errors: Cell::new(false),
602 err_count_on_creation: err_count_on_creation,
604 ps: RefCell::new(UnsafetyState::function(unsafety, unsafety_id)),
609 if let ty::FnConverging(ret_ty) = ret_ty {
610 fcx.require_type_is_sized(ret_ty, decl.output.span(), traits::ReturnType);
613 debug!("fn-sig-map: fn_id={} fn_sig={:?}", fn_id, fn_sig);
615 inherited.tables.borrow_mut().liberated_fn_sigs.insert(fn_id, fn_sig.clone());
618 let mut visit = GatherLocalsVisitor { fcx: &fcx, };
620 // Add formal parameters.
621 for (arg_ty, input) in arg_tys.iter().zip(&decl.inputs) {
622 // The type of the argument must be well-formed.
624 // NB -- this is now checked in wfcheck, but that
625 // currently only results in warnings, so we issue an
626 // old-style WF obligation here so that we still get the
627 // errors that we used to get.
628 fcx.register_old_wf_obligation(arg_ty, input.ty.span, traits::MiscObligation);
630 // Create type variables for each argument.
631 pat_util::pat_bindings(
634 |_bm, pat_id, sp, _path| {
635 let var_ty = visit.assign(sp, pat_id, None);
636 fcx.require_type_is_sized(var_ty, sp,
637 traits::VariableType(pat_id));
640 // Check the pattern.
643 map: pat_id_map(&tcx.def_map, &*input.pat),
645 _match::check_pat(&pcx, &*input.pat, *arg_ty);
648 visit.visit_block(body);
651 check_block_with_expected(&fcx, body, match ret_ty {
652 ty::FnConverging(result_type) => ExpectHasType(result_type),
653 ty::FnDiverging => NoExpectation
656 for (input, arg) in decl.inputs.iter().zip(arg_tys) {
657 fcx.write_ty(input.id, arg);
663 pub fn check_struct(ccx: &CrateCtxt, id: ast::NodeId, span: Span) {
666 check_representable(tcx, span, id, "struct");
668 if tcx.lookup_simd(ccx.tcx.map.local_def_id(id)) {
669 check_simd(tcx, span, id);
673 pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
674 debug!("check_item_type(it.id={}, it.name={})",
676 ccx.tcx.item_path_str(ccx.tcx.map.local_def_id(it.id)));
677 let _indenter = indenter();
679 // Consts can play a role in type-checking, so they are included here.
680 hir::ItemStatic(_, _, ref e) |
681 hir::ItemConst(_, ref e) => check_const(ccx, it.span, &**e, it.id),
682 hir::ItemEnum(ref enum_definition, _) => {
683 check_enum_variants(ccx,
685 &enum_definition.variants,
688 hir::ItemFn(..) => {} // entirely within check_item_body
689 hir::ItemImpl(_, _, _, _, _, ref impl_items) => {
690 debug!("ItemImpl {} with id {}", it.name, it.id);
691 match ccx.tcx.impl_trait_ref(ccx.tcx.map.local_def_id(it.id)) {
692 Some(impl_trait_ref) => {
693 check_impl_items_against_trait(ccx,
701 hir::ItemTrait(_, ref generics, _, _) => {
702 check_trait_on_unimplemented(ccx, generics, it);
704 hir::ItemStruct(..) => {
705 check_struct(ccx, it.id, it.span);
707 hir::ItemTy(_, ref generics) => {
708 let pty_ty = ccx.tcx.node_id_to_type(it.id);
709 check_bounds_are_used(ccx, &generics.ty_params, pty_ty);
711 hir::ItemForeignMod(ref m) => {
712 if m.abi == abi::RustIntrinsic {
713 for item in &m.items {
714 intrinsic::check_intrinsic_type(ccx, item);
716 } else if m.abi == abi::PlatformIntrinsic {
717 for item in &m.items {
718 intrinsic::check_platform_intrinsic_type(ccx, item);
721 for item in &m.items {
722 let pty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(item.id));
723 if !pty.generics.types.is_empty() {
724 span_err!(ccx.tcx.sess, item.span, E0044,
725 "foreign items may not have type parameters");
726 span_help!(ccx.tcx.sess, item.span,
727 "consider using specialization instead of \
731 if let hir::ForeignItemFn(ref fn_decl, _) = item.node {
732 require_c_abi_if_variadic(ccx.tcx, fn_decl, m.abi, item.span);
737 _ => {/* nothing to do */ }
741 pub fn check_item_body<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
742 debug!("check_item_body(it.id={}, it.name={})",
744 ccx.tcx.item_path_str(ccx.tcx.map.local_def_id(it.id)));
745 let _indenter = indenter();
747 hir::ItemFn(ref decl, _, _, _, _, ref body) => {
748 let fn_pty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(it.id));
749 let param_env = ParameterEnvironment::for_item(ccx.tcx, it.id);
750 check_bare_fn(ccx, &**decl, &**body, it.id, it.span, fn_pty.ty, param_env);
752 hir::ItemImpl(_, _, _, _, _, ref impl_items) => {
753 debug!("ItemImpl {} with id {}", it.name, it.id);
755 let impl_pty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(it.id));
757 for impl_item in impl_items {
758 match impl_item.node {
759 hir::ImplItemKind::Const(_, ref expr) => {
760 check_const(ccx, impl_item.span, &*expr, impl_item.id)
762 hir::ImplItemKind::Method(ref sig, ref body) => {
763 check_method_body(ccx, &impl_pty.generics, sig, body,
764 impl_item.id, impl_item.span);
766 hir::ImplItemKind::Type(_) => {
767 // Nothing to do here.
772 hir::ItemTrait(_, _, _, ref trait_items) => {
773 let trait_def = ccx.tcx.lookup_trait_def(ccx.tcx.map.local_def_id(it.id));
774 for trait_item in trait_items {
775 match trait_item.node {
776 hir::ConstTraitItem(_, Some(ref expr)) => {
777 check_const(ccx, trait_item.span, &*expr, trait_item.id)
779 hir::MethodTraitItem(ref sig, Some(ref body)) => {
780 check_trait_fn_not_const(ccx, trait_item.span, sig.constness);
782 check_method_body(ccx, &trait_def.generics, sig, body,
783 trait_item.id, trait_item.span);
785 hir::MethodTraitItem(ref sig, None) => {
786 check_trait_fn_not_const(ccx, trait_item.span, sig.constness);
788 hir::ConstTraitItem(_, None) |
789 hir::TypeTraitItem(..) => {
795 _ => {/* nothing to do */ }
799 fn check_trait_fn_not_const<'a,'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
801 constness: hir::Constness)
804 hir::Constness::NotConst => {
807 hir::Constness::Const => {
808 span_err!(ccx.tcx.sess, span, E0379, "trait fns cannot be declared const");
813 fn check_trait_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
814 generics: &hir::Generics,
816 if let Some(ref attr) = item.attrs.iter().find(|a| {
817 a.check_name("rustc_on_unimplemented")
819 if let Some(ref istring) = attr.value_str() {
820 let parser = Parser::new(&istring);
821 let types = &*generics.ty_params;
822 for token in parser {
824 Piece::String(_) => (), // Normal string, no need to check it
825 Piece::NextArgument(a) => match a.position {
826 // `{Self}` is allowed
827 Position::ArgumentNamed(s) if s == "Self" => (),
828 // So is `{A}` if A is a type parameter
829 Position::ArgumentNamed(s) => match types.iter().find(|t| {
834 span_err!(ccx.tcx.sess, attr.span, E0230,
835 "there is no type parameter \
840 // `{:1}` and `{}` are not to be used
841 Position::ArgumentIs(_) | Position::ArgumentNext => {
842 span_err!(ccx.tcx.sess, attr.span, E0231,
843 "only named substitution \
844 parameters are allowed");
850 span_err!(ccx.tcx.sess, attr.span, E0232,
851 "this attribute must have a value, \
852 eg `#[rustc_on_unimplemented = \"foo\"]`")
857 /// Type checks a method body.
861 /// * `item_generics`: generics defined on the impl/trait that contains
863 /// * `self_bound`: bound for the `Self` type parameter, if any
864 /// * `method`: the method definition
865 fn check_method_body<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
866 item_generics: &ty::Generics<'tcx>,
867 sig: &'tcx hir::MethodSig,
868 body: &'tcx hir::Block,
869 id: ast::NodeId, span: Span) {
870 debug!("check_method_body(item_generics={:?}, id={})",
872 let param_env = ParameterEnvironment::for_item(ccx.tcx, id);
874 let fty = ccx.tcx.node_id_to_type(id);
875 debug!("check_method_body: fty={:?}", fty);
877 check_bare_fn(ccx, &sig.decl, body, id, span, fty, param_env);
880 fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
882 impl_trait_ref: &ty::TraitRef<'tcx>,
883 impl_items: &[hir::ImplItem]) {
884 // Locate trait methods
886 let trait_items = tcx.trait_items(impl_trait_ref.def_id);
887 let mut overridden_associated_type = None;
889 // Check existing impl methods to see if they are both present in trait
890 // and compatible with trait signature
891 for impl_item in impl_items {
892 let ty_impl_item = ccx.tcx.impl_or_trait_item(ccx.tcx.map.local_def_id(impl_item.id));
893 let ty_trait_item = trait_items.iter()
894 .find(|ac| ac.name() == ty_impl_item.name())
896 // This is checked by resolve
897 tcx.sess.span_bug(impl_item.span,
898 &format!("impl-item `{}` is not a member of `{:?}`",
902 match impl_item.node {
903 hir::ImplItemKind::Const(..) => {
904 let impl_const = match ty_impl_item {
905 ty::ConstTraitItem(ref cti) => cti,
906 _ => tcx.sess.span_bug(impl_item.span, "non-const impl-item for const")
909 // Find associated const definition.
910 if let &ty::ConstTraitItem(ref trait_const) = ty_trait_item {
911 compare_const_impl(ccx.tcx,
917 span_err!(tcx.sess, impl_item.span, E0323,
918 "item `{}` is an associated const, \
919 which doesn't match its trait `{:?}`",
924 hir::ImplItemKind::Method(ref sig, ref body) => {
925 check_trait_fn_not_const(ccx, impl_item.span, sig.constness);
927 let impl_method = match ty_impl_item {
928 ty::MethodTraitItem(ref mti) => mti,
929 _ => tcx.sess.span_bug(impl_item.span, "non-method impl-item for method")
932 if let &ty::MethodTraitItem(ref trait_method) = ty_trait_item {
933 compare_impl_method(ccx.tcx,
940 span_err!(tcx.sess, impl_item.span, E0324,
941 "item `{}` is an associated method, \
942 which doesn't match its trait `{:?}`",
947 hir::ImplItemKind::Type(_) => {
948 let impl_type = match ty_impl_item {
949 ty::TypeTraitItem(ref tti) => tti,
950 _ => tcx.sess.span_bug(impl_item.span, "non-type impl-item for type")
953 if let &ty::TypeTraitItem(ref at) = ty_trait_item {
954 if let Some(_) = at.ty {
955 overridden_associated_type = Some(impl_item);
958 span_err!(tcx.sess, impl_item.span, E0325,
959 "item `{}` is an associated type, \
960 which doesn't match its trait `{:?}`",
968 // Check for missing items from trait
969 let provided_methods = tcx.provided_trait_methods(impl_trait_ref.def_id);
970 let mut missing_items = Vec::new();
971 let mut invalidated_items = Vec::new();
972 let associated_type_overridden = overridden_associated_type.is_some();
973 for trait_item in trait_items.iter() {
975 ty::ConstTraitItem(ref associated_const) => {
976 let is_implemented = impl_items.iter().any(|ii| {
978 hir::ImplItemKind::Const(..) => {
979 ii.name == associated_const.name
984 let is_provided = associated_const.has_value;
988 missing_items.push(associated_const.name);
989 } else if associated_type_overridden {
990 invalidated_items.push(associated_const.name);
994 ty::MethodTraitItem(ref trait_method) => {
996 impl_items.iter().any(|ii| {
998 hir::ImplItemKind::Method(..) => {
999 ii.name == trait_method.name
1005 provided_methods.iter().any(|m| m.name == trait_method.name);
1006 if !is_implemented {
1008 missing_items.push(trait_method.name);
1009 } else if associated_type_overridden {
1010 invalidated_items.push(trait_method.name);
1014 ty::TypeTraitItem(ref associated_type) => {
1015 let is_implemented = impl_items.iter().any(|ii| {
1017 hir::ImplItemKind::Type(_) => {
1018 ii.name == associated_type.name
1023 let is_provided = associated_type.ty.is_some();
1024 if !is_implemented {
1026 missing_items.push(associated_type.name);
1027 } else if associated_type_overridden {
1028 invalidated_items.push(associated_type.name);
1035 if !missing_items.is_empty() {
1036 span_err!(tcx.sess, impl_span, E0046,
1037 "not all trait items implemented, missing: `{}`",
1038 missing_items.iter()
1039 .map(|name| name.to_string())
1040 .collect::<Vec<_>>().join("`, `"))
1043 if !invalidated_items.is_empty() {
1044 let invalidator = overridden_associated_type.unwrap();
1045 span_err!(tcx.sess, invalidator.span, E0399,
1046 "the following trait items need to be reimplemented \
1047 as `{}` was overridden: `{}`",
1049 invalidated_items.iter()
1050 .map(|name| name.to_string())
1051 .collect::<Vec<_>>().join("`, `"))
1055 fn report_cast_to_unsized_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
1062 let tstr = fcx.infcx().ty_to_string(t_cast);
1063 fcx.type_error_message(span, |actual| {
1064 format!("cast to unsized type: `{}` as `{}`", actual, tstr)
1067 ty::TyRef(_, ty::TypeAndMut { mutbl: mt, .. }) => {
1068 let mtstr = match mt {
1069 hir::MutMutable => "mut ",
1070 hir::MutImmutable => ""
1072 if t_cast.is_trait() {
1073 match fcx.tcx().sess.codemap().span_to_snippet(t_span) {
1075 fcx.tcx().sess.span_suggestion(t_span,
1076 "try casting to a reference instead:",
1077 format!("&{}{}", mtstr, s));
1080 span_help!(fcx.tcx().sess, t_span,
1081 "did you mean `&{}{}`?", mtstr, tstr),
1084 span_help!(fcx.tcx().sess, span,
1085 "consider using an implicit coercion to `&{}{}` instead",
1090 match fcx.tcx().sess.codemap().span_to_snippet(t_span) {
1092 fcx.tcx().sess.span_suggestion(t_span,
1093 "try casting to a `Box` instead:",
1094 format!("Box<{}>", s));
1097 span_help!(fcx.tcx().sess, t_span, "did you mean `Box<{}>`?", tstr),
1101 span_help!(fcx.tcx().sess, e_span,
1102 "consider using a box or reference as appropriate");
1105 fcx.write_error(id);
1109 impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> {
1110 fn tcx(&self) -> &ty::ctxt<'tcx> { self.ccx.tcx }
1112 fn get_item_type_scheme(&self, _: Span, id: DefId)
1113 -> Result<ty::TypeScheme<'tcx>, ErrorReported>
1115 Ok(self.tcx().lookup_item_type(id))
1118 fn get_trait_def(&self, _: Span, id: DefId)
1119 -> Result<&'tcx ty::TraitDef<'tcx>, ErrorReported>
1121 Ok(self.tcx().lookup_trait_def(id))
1124 fn ensure_super_predicates(&self, _: Span, _: DefId) -> Result<(), ErrorReported> {
1125 // all super predicates are ensured during collect pass
1129 fn get_free_substs(&self) -> Option<&Substs<'tcx>> {
1130 Some(&self.inh.infcx.parameter_environment.free_substs)
1133 fn get_type_parameter_bounds(&self,
1135 node_id: ast::NodeId)
1136 -> Result<Vec<ty::PolyTraitRef<'tcx>>, ErrorReported>
1138 let def = self.tcx().type_parameter_def(node_id);
1139 let r = self.inh.infcx.parameter_environment
1142 .filter_map(|predicate| {
1144 ty::Predicate::Trait(ref data) => {
1145 if data.0.self_ty().is_param(def.space, def.index) {
1146 Some(data.to_poly_trait_ref())
1160 fn trait_defines_associated_type_named(&self,
1161 trait_def_id: DefId,
1162 assoc_name: ast::Name)
1165 let trait_def = self.ccx.tcx.lookup_trait_def(trait_def_id);
1166 trait_def.associated_type_names.contains(&assoc_name)
1170 ty_param_def: Option<ty::TypeParameterDef<'tcx>>,
1171 substs: Option<&mut subst::Substs<'tcx>>,
1172 space: Option<subst::ParamSpace>,
1173 span: Span) -> Ty<'tcx> {
1174 // Grab the default doing subsitution
1175 let default = ty_param_def.and_then(|def| {
1176 def.default.map(|ty| type_variable::Default {
1177 ty: ty.subst_spanned(self.tcx(), substs.as_ref().unwrap(), Some(span)),
1179 def_id: def.default_def_id
1183 let ty_var = self.infcx().next_ty_var_with_default(default);
1185 // Finally we add the type variable to the substs
1188 Some(substs) => { substs.types.push(space.unwrap(), ty_var); ty_var }
1192 fn projected_ty_from_poly_trait_ref(&self,
1194 poly_trait_ref: ty::PolyTraitRef<'tcx>,
1195 item_name: ast::Name)
1198 let (trait_ref, _) =
1199 self.infcx().replace_late_bound_regions_with_fresh_var(
1201 infer::LateBoundRegionConversionTime::AssocTypeProjection(item_name),
1204 self.normalize_associated_type(span, trait_ref, item_name)
1207 fn projected_ty(&self,
1209 trait_ref: ty::TraitRef<'tcx>,
1210 item_name: ast::Name)
1213 self.normalize_associated_type(span, trait_ref, item_name)
1217 impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
1218 fn tcx(&self) -> &ty::ctxt<'tcx> { self.ccx.tcx }
1220 pub fn infcx(&self) -> &infer::InferCtxt<'a,'tcx> {
1224 pub fn param_env(&self) -> &ty::ParameterEnvironment<'a,'tcx> {
1225 &self.inh.infcx.parameter_environment
1228 pub fn sess(&self) -> &Session {
1232 pub fn err_count_since_creation(&self) -> usize {
1233 self.ccx.tcx.sess.err_count() - self.err_count_on_creation
1236 /// Resolves type variables in `ty` if possible. Unlike the infcx
1237 /// version, this version will also select obligations if it seems
1238 /// useful, in an effort to get more type information.
1239 fn resolve_type_vars_if_possible(&self, mut ty: Ty<'tcx>) -> Ty<'tcx> {
1240 debug!("resolve_type_vars_if_possible(ty={:?})", ty);
1242 // No TyInfer()? Nothing needs doing.
1243 if !ty.has_infer_types() {
1244 debug!("resolve_type_vars_if_possible: ty={:?}", ty);
1248 // If `ty` is a type variable, see whether we already know what it is.
1249 ty = self.infcx().resolve_type_vars_if_possible(&ty);
1250 if !ty.has_infer_types() {
1251 debug!("resolve_type_vars_if_possible: ty={:?}", ty);
1255 // If not, try resolving any new fcx obligations that have cropped up.
1256 self.select_new_obligations();
1257 ty = self.infcx().resolve_type_vars_if_possible(&ty);
1258 if !ty.has_infer_types() {
1259 debug!("resolve_type_vars_if_possible: ty={:?}", ty);
1263 // If not, try resolving *all* pending obligations as much as
1264 // possible. This can help substantially when there are
1265 // indirect dependencies that don't seem worth tracking
1267 self.select_obligations_where_possible();
1268 ty = self.infcx().resolve_type_vars_if_possible(&ty);
1270 debug!("resolve_type_vars_if_possible: ty={:?}", ty);
1274 fn record_deferred_call_resolution(&self,
1275 closure_def_id: DefId,
1276 r: DeferredCallResolutionHandler<'tcx>) {
1277 let mut deferred_call_resolutions = self.inh.deferred_call_resolutions.borrow_mut();
1278 deferred_call_resolutions.entry(closure_def_id).or_insert(vec![]).push(r);
1281 fn remove_deferred_call_resolutions(&self,
1282 closure_def_id: DefId)
1283 -> Vec<DeferredCallResolutionHandler<'tcx>>
1285 let mut deferred_call_resolutions = self.inh.deferred_call_resolutions.borrow_mut();
1286 deferred_call_resolutions.remove(&closure_def_id).unwrap_or(Vec::new())
1289 pub fn tag(&self) -> String {
1290 let self_ptr: *const FnCtxt = self;
1291 format!("{:?}", self_ptr)
1294 pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> Ty<'tcx> {
1295 match self.inh.locals.borrow().get(&nid) {
1298 span_err!(self.tcx().sess, span, E0513,
1299 "no type for local variable {}",
1301 self.tcx().types.err
1307 pub fn write_ty(&self, node_id: ast::NodeId, ty: Ty<'tcx>) {
1308 debug!("write_ty({}, {:?}) in fcx {}",
1309 node_id, ty, self.tag());
1310 self.inh.tables.borrow_mut().node_types.insert(node_id, ty);
1313 pub fn write_substs(&self, node_id: ast::NodeId, substs: ty::ItemSubsts<'tcx>) {
1314 if !substs.substs.is_noop() {
1315 debug!("write_substs({}, {:?}) in fcx {}",
1320 self.inh.tables.borrow_mut().item_substs.insert(node_id, substs);
1324 pub fn write_autoderef_adjustment(&self,
1325 node_id: ast::NodeId,
1327 self.write_adjustment(
1329 adjustment::AdjustDerefRef(adjustment::AutoDerefRef {
1337 pub fn write_adjustment(&self,
1338 node_id: ast::NodeId,
1339 adj: adjustment::AutoAdjustment<'tcx>) {
1340 debug!("write_adjustment(node_id={}, adj={:?})", node_id, adj);
1342 if adj.is_identity() {
1346 self.inh.tables.borrow_mut().adjustments.insert(node_id, adj);
1349 /// Basically whenever we are converting from a type scheme into
1350 /// the fn body space, we always want to normalize associated
1351 /// types as well. This function combines the two.
1352 fn instantiate_type_scheme<T>(&self,
1354 substs: &Substs<'tcx>,
1357 where T : TypeFoldable<'tcx> + HasTypeFlags
1359 let value = value.subst(self.tcx(), substs);
1360 let result = self.normalize_associated_types_in(span, &value);
1361 debug!("instantiate_type_scheme(value={:?}, substs={:?}) = {:?}",
1368 /// As `instantiate_type_scheme`, but for the bounds found in a
1369 /// generic type scheme.
1370 fn instantiate_bounds(&self,
1372 substs: &Substs<'tcx>,
1373 bounds: &ty::GenericPredicates<'tcx>)
1374 -> ty::InstantiatedPredicates<'tcx>
1376 ty::InstantiatedPredicates {
1377 predicates: self.instantiate_type_scheme(span, substs, &bounds.predicates)
1382 fn normalize_associated_types_in<T>(&self, span: Span, value: &T) -> T
1383 where T : TypeFoldable<'tcx> + HasTypeFlags
1385 self.inh.normalize_associated_types_in(span, self.body_id, value)
1388 fn normalize_associated_type(&self,
1390 trait_ref: ty::TraitRef<'tcx>,
1391 item_name: ast::Name)
1394 let cause = traits::ObligationCause::new(span,
1396 traits::ObligationCauseCode::MiscObligation);
1401 .normalize_projection_type(self.infcx(),
1403 trait_ref: trait_ref,
1404 item_name: item_name,
1409 /// Instantiates the type in `did` with the generics in `path` and returns
1410 /// it (registering the necessary trait obligations along the way).
1412 /// Note that this function is only intended to be used with type-paths,
1413 /// not with value-paths.
1414 pub fn instantiate_type(&self,
1419 debug!("instantiate_type(did={:?}, path={:?})", did, path);
1421 self.tcx().lookup_item_type(did);
1422 let type_predicates =
1423 self.tcx().lookup_predicates(did);
1424 let substs = astconv::ast_path_substs_for_ty(self, self,
1426 PathParamMode::Optional,
1427 &type_scheme.generics,
1428 path.segments.last().unwrap());
1429 debug!("instantiate_type: ty={:?} substs={:?}", &type_scheme.ty, &substs);
1431 self.instantiate_bounds(path.span, &substs, &type_predicates);
1432 self.add_obligations_for_parameters(
1433 traits::ObligationCause::new(
1436 traits::ItemObligation(did)),
1439 self.instantiate_type_scheme(path.span, &substs, &type_scheme.ty)
1442 /// Return the dict-like variant corresponding to a given `Def`.
1443 pub fn def_struct_variant(&self,
1446 -> Option<(ty::AdtDef<'tcx>, ty::VariantDef<'tcx>)>
1448 let (adt, variant) = match def {
1449 def::DefVariant(enum_id, variant_id, _) => {
1450 let adt = self.tcx().lookup_adt_def(enum_id);
1451 (adt, adt.variant_with_id(variant_id))
1453 def::DefTy(did, _) | def::DefStruct(did) => {
1454 let typ = self.tcx().lookup_item_type(did);
1455 if let ty::TyStruct(adt, _) = typ.ty.sty {
1456 (adt, adt.struct_variant())
1464 let var_kind = variant.kind();
1465 if var_kind == ty::VariantKind::Struct {
1466 Some((adt, variant))
1467 } else if var_kind == ty::VariantKind::Unit {
1468 if !self.tcx().sess.features.borrow().braced_empty_structs {
1469 self.tcx().sess.span_err(span, "empty structs and enum variants \
1470 with braces are unstable");
1471 fileline_help!(self.tcx().sess, span, "add #![feature(braced_empty_structs)] to \
1472 the crate features to enable");
1475 Some((adt, variant))
1481 pub fn write_nil(&self, node_id: ast::NodeId) {
1482 self.write_ty(node_id, self.tcx().mk_nil());
1484 pub fn write_error(&self, node_id: ast::NodeId) {
1485 self.write_ty(node_id, self.tcx().types.err);
1488 pub fn require_type_meets(&self,
1491 code: traits::ObligationCauseCode<'tcx>,
1492 bound: ty::BuiltinBound)
1494 self.register_builtin_bound(
1497 traits::ObligationCause::new(span, self.body_id, code));
1500 pub fn require_type_is_sized(&self,
1503 code: traits::ObligationCauseCode<'tcx>)
1505 self.require_type_meets(ty, span, code, ty::BoundSized);
1508 pub fn require_expr_have_sized_type(&self,
1510 code: traits::ObligationCauseCode<'tcx>)
1512 self.require_type_is_sized(self.expr_ty(expr), expr.span, code);
1515 pub fn type_is_known_to_be_sized(&self,
1520 traits::type_known_to_meet_builtin_bound(self.infcx(),
1526 pub fn register_builtin_bound(&self,
1528 builtin_bound: ty::BuiltinBound,
1529 cause: traits::ObligationCause<'tcx>)
1531 self.inh.infcx.fulfillment_cx.borrow_mut()
1532 .register_builtin_bound(self.infcx(), ty, builtin_bound, cause);
1535 pub fn register_predicate(&self,
1536 obligation: traits::PredicateObligation<'tcx>)
1538 debug!("register_predicate({:?})",
1540 self.inh.infcx.fulfillment_cx
1542 .register_predicate_obligation(self.infcx(), obligation);
1545 pub fn to_ty(&self, ast_t: &hir::Ty) -> Ty<'tcx> {
1546 let t = ast_ty_to_ty(self, self, ast_t);
1547 self.register_wf_obligation(t, ast_t.span, traits::MiscObligation);
1551 pub fn expr_ty(&self, ex: &hir::Expr) -> Ty<'tcx> {
1552 match self.inh.tables.borrow().node_types.get(&ex.id) {
1555 self.tcx().sess.bug(&format!("no type for expr in fcx {}",
1561 /// Apply `adjustment` to the type of `expr`
1562 pub fn adjust_expr_ty(&self,
1564 adjustment: Option<&adjustment::AutoAdjustment<'tcx>>)
1567 let raw_ty = self.expr_ty(expr);
1568 let raw_ty = self.infcx().shallow_resolve(raw_ty);
1569 let resolve_ty = |ty: Ty<'tcx>| self.infcx().resolve_type_vars_if_possible(&ty);
1570 raw_ty.adjust(self.tcx(), expr.span, expr.id, adjustment, |method_call| {
1571 self.inh.tables.borrow().method_map.get(&method_call)
1572 .map(|method| resolve_ty(method.ty))
1576 pub fn node_ty(&self, id: ast::NodeId) -> Ty<'tcx> {
1577 match self.inh.tables.borrow().node_types.get(&id) {
1579 None if self.err_count_since_creation() != 0 => self.tcx().types.err,
1581 self.tcx().sess.bug(
1582 &format!("no type for node {}: {} in fcx {}",
1583 id, self.tcx().map.node_to_string(id),
1589 pub fn item_substs(&self) -> Ref<NodeMap<ty::ItemSubsts<'tcx>>> {
1590 // NOTE: @jroesch this is hack that appears to be fixed on nightly, will monitor if
1591 // it changes when we upgrade the snapshot compiler
1592 fn project_item_susbts<'a, 'tcx>(tables: &'a ty::Tables<'tcx>)
1593 -> &'a NodeMap<ty::ItemSubsts<'tcx>> {
1597 Ref::map(self.inh.tables.borrow(), project_item_susbts)
1600 pub fn opt_node_ty_substs<F>(&self,
1603 F: FnOnce(&ty::ItemSubsts<'tcx>),
1605 match self.inh.tables.borrow().item_substs.get(&id) {
1611 pub fn mk_subty(&self,
1612 a_is_expected: bool,
1616 -> Result<(), TypeError<'tcx>> {
1617 infer::mk_subty(self.infcx(), a_is_expected, origin, sub, sup)
1620 pub fn mk_eqty(&self,
1621 a_is_expected: bool,
1625 -> Result<(), TypeError<'tcx>> {
1626 infer::mk_eqty(self.infcx(), a_is_expected, origin, sub, sup)
1629 pub fn mk_subr(&self,
1630 origin: infer::SubregionOrigin<'tcx>,
1633 infer::mk_subr(self.infcx(), origin, sub, sup)
1636 pub fn type_error_message<M>(&self,
1639 actual_ty: Ty<'tcx>,
1640 err: Option<&TypeError<'tcx>>) where
1641 M: FnOnce(String) -> String,
1643 self.infcx().type_error_message(sp, mk_msg, actual_ty, err);
1646 pub fn report_mismatched_types(&self,
1650 err: &TypeError<'tcx>) {
1651 self.infcx().report_mismatched_types(sp, e, a, err)
1654 /// Registers an obligation for checking later, during regionck, that the type `ty` must
1655 /// outlive the region `r`.
1656 pub fn register_region_obligation(&self,
1659 cause: traits::ObligationCause<'tcx>)
1661 let mut fulfillment_cx = self.inh.infcx.fulfillment_cx.borrow_mut();
1662 fulfillment_cx.register_region_obligation(ty, region, cause);
1665 /// Registers an obligation for checking later, during regionck, that the type `ty` must
1666 /// outlive the region `r`.
1667 pub fn register_wf_obligation(&self,
1670 code: traits::ObligationCauseCode<'tcx>)
1672 // WF obligations never themselves fail, so no real need to give a detailed cause:
1673 let cause = traits::ObligationCause::new(span, self.body_id, code);
1674 self.register_predicate(traits::Obligation::new(cause, ty::Predicate::WellFormed(ty)));
1677 pub fn register_old_wf_obligation(&self,
1680 code: traits::ObligationCauseCode<'tcx>)
1682 // Registers an "old-style" WF obligation that uses the
1683 // implicator code. This is basically a buggy version of
1684 // `register_wf_obligation` that is being kept around
1685 // temporarily just to help with phasing in the newer rules.
1687 // FIXME(#27579) all uses of this should be migrated to register_wf_obligation eventually
1688 let cause = traits::ObligationCause::new(span, self.body_id, code);
1689 self.register_region_obligation(ty, ty::ReEmpty, cause);
1692 /// Registers obligations that all types appearing in `substs` are well-formed.
1693 pub fn add_wf_bounds(&self, substs: &Substs<'tcx>, expr: &hir::Expr)
1695 for &ty in &substs.types {
1696 self.register_wf_obligation(ty, expr.span, traits::MiscObligation);
1700 /// Given a fully substituted set of bounds (`generic_bounds`), and the values with which each
1701 /// type/region parameter was instantiated (`substs`), creates and registers suitable
1702 /// trait/region obligations.
1704 /// For example, if there is a function:
1707 /// fn foo<'a,T:'a>(...)
1710 /// and a reference:
1716 /// Then we will create a fresh region variable `'$0` and a fresh type variable `$1` for `'a`
1717 /// and `T`. This routine will add a region obligation `$1:'$0` and register it locally.
1718 pub fn add_obligations_for_parameters(&self,
1719 cause: traits::ObligationCause<'tcx>,
1720 predicates: &ty::InstantiatedPredicates<'tcx>)
1722 assert!(!predicates.has_escaping_regions());
1724 debug!("add_obligations_for_parameters(predicates={:?})",
1727 for obligation in traits::predicates_for_generics(cause, predicates) {
1728 self.register_predicate(obligation);
1732 // FIXME(arielb1): use this instead of field.ty everywhere
1733 pub fn field_ty(&self,
1735 field: ty::FieldDef<'tcx>,
1736 substs: &Substs<'tcx>)
1739 self.normalize_associated_types_in(span,
1740 &field.ty(self.tcx(), substs))
1743 // Only for fields! Returns <none> for methods>
1744 // Indifferent to privacy flags
1745 fn check_casts(&self) {
1746 let mut deferred_cast_checks = self.inh.deferred_cast_checks.borrow_mut();
1747 for cast in deferred_cast_checks.drain(..) {
1752 /// Apply "fallbacks" to some types
1753 /// ! gets replaced with (), unconstrained ints with i32, and unconstrained floats with f64.
1754 fn default_type_parameters(&self) {
1755 use middle::ty::error::UnconstrainedNumeric::Neither;
1756 use middle::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1757 for ty in &self.infcx().unsolved_variables() {
1758 let resolved = self.infcx().resolve_type_vars_if_possible(ty);
1759 if self.infcx().type_var_diverges(resolved) {
1760 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
1762 match self.infcx().type_is_unconstrained_numeric(resolved) {
1763 UnconstrainedInt => {
1764 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32)
1766 UnconstrainedFloat => {
1767 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64)
1775 fn select_all_obligations_and_apply_defaults(&self) {
1776 if self.tcx().sess.features.borrow().default_type_parameter_fallback {
1777 self.new_select_all_obligations_and_apply_defaults();
1779 self.old_select_all_obligations_and_apply_defaults();
1783 // Implements old type inference fallback algorithm
1784 fn old_select_all_obligations_and_apply_defaults(&self) {
1785 self.select_obligations_where_possible();
1786 self.default_type_parameters();
1787 self.select_obligations_where_possible();
1790 fn new_select_all_obligations_and_apply_defaults(&self) {
1791 use middle::ty::error::UnconstrainedNumeric::Neither;
1792 use middle::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1794 // For the time being this errs on the side of being memory wasteful but provides better
1796 // let type_variables = self.infcx().type_variables.clone();
1798 // There is a possibility that this algorithm will have to run an arbitrary number of times
1799 // to terminate so we bound it by the compiler's recursion limit.
1800 for _ in 0..self.tcx().sess.recursion_limit.get() {
1801 // First we try to solve all obligations, it is possible that the last iteration
1802 // has made it possible to make more progress.
1803 self.select_obligations_where_possible();
1805 let mut conflicts = Vec::new();
1807 // Collect all unsolved type, integral and floating point variables.
1808 let unsolved_variables = self.inh.infcx.unsolved_variables();
1810 // We must collect the defaults *before* we do any unification. Because we have
1811 // directly attached defaults to the type variables any unification that occurs
1812 // will erase defaults causing conflicting defaults to be completely ignored.
1813 let default_map: FnvHashMap<_, _> =
1816 .filter_map(|t| self.infcx().default(t).map(|d| (t, d)))
1819 let mut unbound_tyvars = HashSet::new();
1821 debug!("select_all_obligations_and_apply_defaults: defaults={:?}", default_map);
1823 // We loop over the unsolved variables, resolving them and if they are
1824 // and unconstrainted numberic type we add them to the set of unbound
1825 // variables. We do this so we only apply literal fallback to type
1826 // variables without defaults.
1827 for ty in &unsolved_variables {
1828 let resolved = self.infcx().resolve_type_vars_if_possible(ty);
1829 if self.infcx().type_var_diverges(resolved) {
1830 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
1832 match self.infcx().type_is_unconstrained_numeric(resolved) {
1833 UnconstrainedInt | UnconstrainedFloat => {
1834 unbound_tyvars.insert(resolved);
1841 // We now remove any numeric types that also have defaults, and instead insert
1842 // the type variable with a defined fallback.
1843 for ty in &unsolved_variables {
1844 if let Some(_default) = default_map.get(ty) {
1845 let resolved = self.infcx().resolve_type_vars_if_possible(ty);
1847 debug!("select_all_obligations_and_apply_defaults: ty: {:?} with default: {:?}",
1850 match resolved.sty {
1851 ty::TyInfer(ty::TyVar(_)) => {
1852 unbound_tyvars.insert(ty);
1855 ty::TyInfer(ty::IntVar(_)) | ty::TyInfer(ty::FloatVar(_)) => {
1856 unbound_tyvars.insert(ty);
1857 if unbound_tyvars.contains(resolved) {
1858 unbound_tyvars.remove(resolved);
1867 // If there are no more fallbacks to apply at this point we have applied all possible
1868 // defaults and type inference will proceed as normal.
1869 if unbound_tyvars.is_empty() {
1873 // Finally we go through each of the unbound type variables and unify them with
1874 // the proper fallback, reporting a conflicting default error if any of the
1875 // unifications fail. We know it must be a conflicting default because the
1876 // variable would only be in `unbound_tyvars` and have a concrete value if
1877 // it had been solved by previously applying a default.
1879 // We wrap this in a transaction for error reporting, if we detect a conflict
1880 // we will rollback the inference context to its prior state so we can probe
1881 // for conflicts and correctly report them.
1884 let _ = self.infcx().commit_if_ok(|_: &infer::CombinedSnapshot| {
1885 for ty in &unbound_tyvars {
1886 if self.infcx().type_var_diverges(ty) {
1887 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
1889 match self.infcx().type_is_unconstrained_numeric(ty) {
1890 UnconstrainedInt => {
1891 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32)
1893 UnconstrainedFloat => {
1894 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64)
1897 if let Some(default) = default_map.get(ty) {
1898 let default = default.clone();
1899 match infer::mk_eqty(self.infcx(), false,
1900 TypeOrigin::Misc(default.origin_span),
1904 conflicts.push((*ty, default));
1913 // If there are conflicts we rollback, otherwise commit
1914 if conflicts.len() > 0 {
1921 if conflicts.len() > 0 {
1922 // Loop through each conflicting default, figuring out the default that caused
1923 // a unification failure and then report an error for each.
1924 for (conflict, default) in conflicts {
1925 let conflicting_default =
1926 self.find_conflicting_default(&unbound_tyvars, &default_map, conflict)
1927 .unwrap_or(type_variable::Default {
1928 ty: self.infcx().next_ty_var(),
1929 origin_span: codemap::DUMMY_SP,
1930 def_id: self.tcx().map.local_def_id(0) // what do I put here?
1933 // This is to ensure that we elimnate any non-determinism from the error
1934 // reporting by fixing an order, it doesn't matter what order we choose
1935 // just that it is consistent.
1936 let (first_default, second_default) =
1937 if default.def_id < conflicting_default.def_id {
1938 (default, conflicting_default)
1940 (conflicting_default, default)
1944 self.infcx().report_conflicting_default_types(
1945 first_default.origin_span,
1952 self.select_obligations_where_possible();
1955 // For use in error handling related to default type parameter fallback. We explicitly
1956 // apply the default that caused conflict first to a local version of the type variable
1957 // table then apply defaults until we find a conflict. That default must be the one
1958 // that caused conflict earlier.
1959 fn find_conflicting_default(&self,
1960 unbound_vars: &HashSet<Ty<'tcx>>,
1961 default_map: &FnvHashMap<&Ty<'tcx>, type_variable::Default<'tcx>>,
1963 -> Option<type_variable::Default<'tcx>> {
1964 use middle::ty::error::UnconstrainedNumeric::Neither;
1965 use middle::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1967 // Ensure that we apply the conflicting default first
1968 let mut unbound_tyvars = Vec::with_capacity(unbound_vars.len() + 1);
1969 unbound_tyvars.push(conflict);
1970 unbound_tyvars.extend(unbound_vars.iter());
1972 let mut result = None;
1973 // We run the same code as above applying defaults in order, this time when
1974 // we find the conflict we just return it for error reporting above.
1976 // We also run this inside snapshot that never commits so we can do error
1977 // reporting for more then one conflict.
1978 for ty in &unbound_tyvars {
1979 if self.infcx().type_var_diverges(ty) {
1980 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
1982 match self.infcx().type_is_unconstrained_numeric(ty) {
1983 UnconstrainedInt => {
1984 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32)
1986 UnconstrainedFloat => {
1987 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64)
1990 if let Some(default) = default_map.get(ty) {
1991 let default = default.clone();
1992 match infer::mk_eqty(self.infcx(), false,
1993 TypeOrigin::Misc(default.origin_span),
1997 result = Some(default);
2009 fn select_all_obligations_or_error(&self) {
2010 debug!("select_all_obligations_or_error");
2012 // upvar inference should have ensured that all deferred call
2013 // resolutions are handled by now.
2014 assert!(self.inh.deferred_call_resolutions.borrow().is_empty());
2016 self.select_all_obligations_and_apply_defaults();
2018 let mut fulfillment_cx = self.inh.infcx.fulfillment_cx.borrow_mut();
2019 match fulfillment_cx.select_all_or_error(self.infcx()) {
2021 Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }
2025 /// Select as many obligations as we can at present.
2026 fn select_obligations_where_possible(&self) {
2028 self.inh.infcx.fulfillment_cx
2030 .select_where_possible(self.infcx())
2033 Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }
2037 /// Try to select any fcx obligation that we haven't tried yet, in an effort
2038 /// to improve inference. You could just call
2039 /// `select_obligations_where_possible` except that it leads to repeated
2041 fn select_new_obligations(&self) {
2043 self.inh.infcx.fulfillment_cx
2045 .select_new_obligations(self.infcx())
2048 Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }
2054 impl<'a, 'tcx> RegionScope for FnCtxt<'a, 'tcx> {
2055 fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
2056 Some(self.base_object_lifetime_default(span))
2059 fn base_object_lifetime_default(&self, span: Span) -> ty::Region {
2060 // RFC #599 specifies that object lifetime defaults take
2061 // precedence over other defaults. But within a fn body we
2062 // don't have a *default* region, rather we use inference to
2063 // find the *correct* region, which is strictly more general
2064 // (and anyway, within a fn body the right region may not even
2065 // be something the user can write explicitly, since it might
2066 // be some expression).
2067 self.infcx().next_region_var(infer::MiscVariable(span))
2070 fn anon_regions(&self, span: Span, count: usize)
2071 -> Result<Vec<ty::Region>, Option<Vec<ElisionFailureInfo>>> {
2072 Ok((0..count).map(|_| {
2073 self.infcx().next_region_var(infer::MiscVariable(span))
2078 /// Whether `autoderef` requires types to resolve.
2079 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
2080 pub enum UnresolvedTypeAction {
2081 /// Produce an error and return `TyError` whenever a type cannot
2082 /// be resolved (i.e. it is `TyInfer`).
2084 /// Go on without emitting any errors, and return the unresolved
2085 /// type. Useful for probing, e.g. in coercions.
2089 /// Executes an autoderef loop for the type `t`. At each step, invokes `should_stop` to decide
2090 /// whether to terminate the loop. Returns the final type and number of derefs that it performed.
2092 /// Note: this method does not modify the adjustments table. The caller is responsible for
2093 /// inserting an AutoAdjustment record into the `fcx` using one of the suitable methods.
2094 pub fn autoderef<'a, 'tcx, T, F>(fcx: &FnCtxt<'a, 'tcx>,
2097 opt_expr: Option<&hir::Expr>,
2098 unresolved_type_action: UnresolvedTypeAction,
2099 mut lvalue_pref: LvaluePreference,
2101 -> (Ty<'tcx>, usize, Option<T>)
2102 where F: FnMut(Ty<'tcx>, usize) -> Option<T>,
2104 debug!("autoderef(base_ty={:?}, opt_expr={:?}, lvalue_pref={:?})",
2109 let mut t = base_ty;
2110 for autoderefs in 0..fcx.tcx().sess.recursion_limit.get() {
2111 let resolved_t = match unresolved_type_action {
2112 UnresolvedTypeAction::Error => {
2113 structurally_resolved_type(fcx, sp, t)
2115 UnresolvedTypeAction::Ignore => {
2116 // We can continue even when the type cannot be resolved
2117 // (i.e. it is an inference variable) because `Ty::builtin_deref`
2118 // and `try_overloaded_deref` both simply return `None`
2119 // in such a case without producing spurious errors.
2120 fcx.resolve_type_vars_if_possible(t)
2123 if resolved_t.references_error() {
2124 return (resolved_t, autoderefs, None);
2127 match should_stop(resolved_t, autoderefs) {
2128 Some(x) => return (resolved_t, autoderefs, Some(x)),
2132 // Otherwise, deref if type is derefable:
2133 let mt = match resolved_t.builtin_deref(false, lvalue_pref) {
2134 Some(mt) => Some(mt),
2137 opt_expr.map(|expr| MethodCall::autoderef(expr.id, autoderefs as u32));
2139 // Super subtle: it might seem as though we should
2140 // pass `opt_expr` to `try_overloaded_deref`, so that
2141 // the (implicit) autoref of using an overloaded deref
2142 // would get added to the adjustment table. However we
2143 // do not do that, because it's kind of a
2144 // "meta-adjustment" -- instead, we just leave it
2145 // unrecorded and know that there "will be" an
2146 // autoref. regionck and other bits of the code base,
2147 // when they encounter an overloaded autoderef, have
2148 // to do some reconstructive surgery. This is a pretty
2149 // complex mess that is begging for a proper MIR.
2150 try_overloaded_deref(fcx, sp, method_call, None, resolved_t, lvalue_pref)
2156 if mt.mutbl == hir::MutImmutable {
2157 lvalue_pref = NoPreference;
2160 None => return (resolved_t, autoderefs, None)
2164 // We've reached the recursion limit, error gracefully.
2165 span_err!(fcx.tcx().sess, sp, E0055,
2166 "reached the recursion limit while auto-dereferencing {:?}",
2168 (fcx.tcx().types.err, 0, None)
2171 fn try_overloaded_deref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2173 method_call: Option<MethodCall>,
2174 base_expr: Option<&hir::Expr>,
2176 lvalue_pref: LvaluePreference)
2177 -> Option<ty::TypeAndMut<'tcx>>
2179 // Try DerefMut first, if preferred.
2180 let method = match (lvalue_pref, fcx.tcx().lang_items.deref_mut_trait()) {
2181 (PreferMutLvalue, Some(trait_did)) => {
2182 method::lookup_in_trait(fcx, span, base_expr,
2183 token::intern("deref_mut"), trait_did,
2189 // Otherwise, fall back to Deref.
2190 let method = match (method, fcx.tcx().lang_items.deref_trait()) {
2191 (None, Some(trait_did)) => {
2192 method::lookup_in_trait(fcx, span, base_expr,
2193 token::intern("deref"), trait_did,
2196 (method, _) => method
2199 make_overloaded_lvalue_return_type(fcx, method_call, method)
2202 /// For the overloaded lvalue expressions (`*x`, `x[3]`), the trait returns a type of `&T`, but the
2203 /// actual type we assign to the *expression* is `T`. So this function just peels off the return
2204 /// type by one layer to yield `T`. It also inserts the `method-callee` into the method map.
2205 fn make_overloaded_lvalue_return_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2206 method_call: Option<MethodCall>,
2207 method: Option<MethodCallee<'tcx>>)
2208 -> Option<ty::TypeAndMut<'tcx>>
2212 // extract method return type, which will be &T;
2213 // all LB regions should have been instantiated during method lookup
2214 let ret_ty = method.ty.fn_ret();
2215 let ret_ty = fcx.tcx().no_late_bound_regions(&ret_ty).unwrap().unwrap();
2217 if let Some(method_call) = method_call {
2218 fcx.inh.tables.borrow_mut().method_map.insert(method_call, method);
2221 // method returns &T, but the type as visible to user is T, so deref
2222 ret_ty.builtin_deref(true, NoPreference)
// Resolves `base_expr[idx]`: autoderef the base type step by step, trying
// `try_index_step` at each level; if the fully-derefed type is a fixed-size
// array `[T; n]`, additionally try indexing after an unsize to `[T]`.
// Returns the (index type, element type) pair on success.
2228 fn lookup_indexing<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2230 base_expr: &'tcx hir::Expr,
2233 lvalue_pref: LvaluePreference)
2234 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2236 // FIXME(#18741) -- this is almost but not quite the same as the
2237 // autoderef that normal method probing does. They could likely be
// NOTE(review): several lines are elided here (the rest of the FIXME and some
// of the `autoderef` arguments) -- confirm the closure/argument wiring against
// the full file.
2240 let (ty, autoderefs, final_mt) = autoderef(fcx,
2244 UnresolvedTypeAction::Error,
// At each autoderef step, try one indexing step on the adjusted type
// (the `false` flag means "no unsizing applied yet").
2247 try_index_step(fcx, MethodCall::expr(expr.id), expr, base_expr,
2248 adj_ty, idx, false, lvalue_pref, idx_ty)
2251 if final_mt.is_some() {
2255 // After we have fully autoderef'd, if the resulting type is [T; n], then
2256 // do a final unsized coercion to yield [T].
2257 if let ty::TyArray(element_ty, _) = ty.sty {
2258 let adjusted_ty = fcx.tcx().mk_slice(element_ty);
// Retry the index step on `[T]`, this time with the unsize flag set.
2259 try_index_step(fcx, MethodCall::expr(expr.id), expr, base_expr,
2260 adjusted_ty, autoderefs, true, lvalue_pref, idx_ty)
2266 /// To type-check `base_expr[index_expr]`, we progressively autoderef (and otherwise adjust)
2267 /// `base_expr`, looking for a type which either supports builtin indexing or overloaded indexing.
2268 /// This loop implements one step in that search; the autoderef loop is implemented by
2269 /// `lookup_indexing`.
2270 fn try_index_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2271 method_call: MethodCall,
2273 base_expr: &'tcx hir::Expr,
2274 adjusted_ty: Ty<'tcx>,
2277 lvalue_pref: LvaluePreference,
2279 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2281 let tcx = fcx.tcx();
2282 debug!("try_index_step(expr={:?}, base_expr.id={:?}, adjusted_ty={:?}, \
2283 autoderefs={}, unsize={}, index_ty={:?})",
// Fresh inference variable standing in for the index type; it is passed to
// trait lookup below and returned as the "index type" on the overloaded path.
2291 let input_ty = fcx.infcx().next_ty_var();
2293 // First, try built-in indexing.
// Builtin indexing applies when the adjusted type is intrinsically indexable
// and the index is `usize` (or an as-yet-unconstrained integer variable).
2294 match (adjusted_ty.builtin_index(), &index_ty.sty) {
2295 (Some(ty), &ty::TyUint(ast::TyUs)) | (Some(ty), &ty::TyInfer(ty::IntVar(_))) => {
2296 debug!("try_index_step: success, using built-in indexing");
2297 // If we had `[T; N]`, we should've caught it before unsizing to `[T]`.
2299 fcx.write_autoderef_adjustment(base_expr.id, autoderefs);
2300 return Some((tcx.types.usize, ty));
2305 // Try `IndexMut` first, if preferred.
// A mutable-lvalue context (e.g. `v[i] = x`) prefers `IndexMut`; otherwise
// fall through with `method == None` and try plain `Index` below.
2306 let method = match (lvalue_pref, tcx.lang_items.index_mut_trait()) {
2307 (PreferMutLvalue, Some(trait_did)) => {
2308 method::lookup_in_trait_adjusted(fcx,
2311 token::intern("index_mut"),
2316 Some(vec![input_ty]))
2321 // Otherwise, fall back to `Index`.
2322 let method = match (method, tcx.lang_items.index_trait()) {
2323 (None, Some(trait_did)) => {
2324 method::lookup_in_trait_adjusted(fcx,
2327 token::intern("index"),
2332 Some(vec![input_ty]))
2334 (method, _) => method,
2337 // If some lookup succeeds, write callee into table and extract index/element
2338 // type from the method signature.
2339 // If some lookup succeeded, install method in table
2340 method.and_then(|method| {
2341 debug!("try_index_step: success, using overloaded indexing");
// `make_overloaded_lvalue_return_type` records the callee and peels `&T`
// from the method's return type down to the element type `T`.
2342 make_overloaded_lvalue_return_type(fcx, Some(method_call), Some(method)).
2343 map(|ret| (input_ty, ret.ty))
// Checks the arguments of a method call against the method's signature
// (the receiver is NOT among `args_no_rcvr` and its formal type is skipped
// via `[1..]` below). Returns the method's declared output type, or a
// converging error type if the method type itself already contains errors.
2347 fn check_method_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2349 method_fn_ty: Ty<'tcx>,
2350 callee_expr: &'tcx hir::Expr,
2351 args_no_rcvr: &'tcx [P<hir::Expr>],
2352 tuple_arguments: TupleArgumentsFlag,
2353 expected: Expectation<'tcx>)
2354 -> ty::FnOutput<'tcx> {
2355 if method_fn_ty.references_error() {
// Error recovery: still type-check each argument so later passes have
// types for every node, but against all-error formal types.
2356 let err_inputs = err_args(fcx.tcx(), args_no_rcvr.len());
2358 let err_inputs = match tuple_arguments {
2359 DontTupleArguments => err_inputs,
2360 TupleArguments => vec![fcx.tcx().mk_tup(err_inputs)],
2363 check_argument_types(fcx,
2370 ty::FnConverging(fcx.tcx().types.err)
2372 match method_fn_ty.sty {
2373 ty::TyBareFn(_, ref fty) => {
2374 // HACK(eddyb) ignore self in the definition (see above).
// Compute per-argument expected types by unifying the signature's
// return type with the call's expected type (see
// `expected_types_for_fn_args`); receiver slot dropped via `[1..]`.
2375 let expected_arg_tys = expected_types_for_fn_args(fcx,
2379 &fty.sig.0.inputs[1..]);
2380 check_argument_types(fcx,
2382 &fty.sig.0.inputs[1..],
2383 &expected_arg_tys[..],
// NOTE(review): the remaining arguments to check_argument_types and the
// `=> fty.sig.0.output`-style result are elided in this listing.
2390 fcx.tcx().sess.span_bug(callee_expr.span,
2391 "method without bare fn type");
2397 /// Generic function that factors out common logic from function calls, method calls and overloaded
// NOTE(review): line 2398 (the rest of this doc comment, presumably
// "operators") is elided in this listing.
2399 fn check_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2401 fn_inputs: &[Ty<'tcx>],
2402 expected_arg_tys: &[Ty<'tcx>],
2403 args: &'tcx [P<hir::Expr>],
2405 tuple_arguments: TupleArgumentsFlag) {
2406 let tcx = fcx.ccx.tcx;
2408 // Grab the argument types, supplying fresh type variables
2409 // if the wrong number of arguments were supplied
2410 let supplied_arg_count = if tuple_arguments == DontTupleArguments {
2416 // All the input types from the fn signature must outlive the call
2417 // so as to validate implied bounds.
2418 for &fn_input_ty in fn_inputs {
2419 fcx.register_wf_obligation(fn_input_ty, sp, traits::MiscObligation);
// Phase 1: arity checking. Compute the formal parameter types, emitting
// E0057/E0059/E0060/E0061 on mismatch and substituting error types so
// checking can continue.
2422 let mut expected_arg_tys = expected_arg_tys;
2423 let expected_arg_count = fn_inputs.len();
2424 let formal_tys = if tuple_arguments == TupleArguments {
// Overloaded-call (Fn traits) case: the single formal input must be a
// tuple whose arity matches the call-site argument count.
2425 let tuple_type = structurally_resolved_type(fcx, sp, fn_inputs[0]);
2426 match tuple_type.sty {
2427 ty::TyTuple(ref arg_types) => {
2428 if arg_types.len() != args.len() {
2429 span_err!(tcx.sess, sp, E0057,
2430 "this function takes {} parameter{} but {} parameter{} supplied",
2432 if arg_types.len() == 1 {""} else {"s"},
2434 if args.len() == 1 {" was"} else {"s were"});
2435 expected_arg_tys = &[];
2436 err_args(fcx.tcx(), args.len())
// Arity matches: if the caller's expectation is itself a tuple,
// distribute its element types as per-argument expectations.
2438 expected_arg_tys = match expected_arg_tys.get(0) {
2439 Some(&ty) => match ty.sty {
2440 ty::TyTuple(ref tys) => &**tys,
2445 (*arg_types).clone()
2449 span_err!(tcx.sess, sp, E0059,
2450 "cannot use call notation; the first type parameter \
2451 for the function trait is neither a tuple nor unit");
2452 expected_arg_tys = &[];
2453 err_args(fcx.tcx(), args.len())
2456 } else if expected_arg_count == supplied_arg_count {
2458 } else if variadic {
// C variadic: at least `expected_arg_count` arguments are required.
2459 if supplied_arg_count >= expected_arg_count {
2462 span_err!(tcx.sess, sp, E0060,
2463 "this function takes at least {} parameter{} \
2464 but {} parameter{} supplied",
2466 if expected_arg_count == 1 {""} else {"s"},
2468 if supplied_arg_count == 1 {" was"} else {"s were"});
2469 expected_arg_tys = &[];
2470 err_args(fcx.tcx(), supplied_arg_count)
2473 span_err!(tcx.sess, sp, E0061,
2474 "this function takes {} parameter{} but {} parameter{} supplied",
2476 if expected_arg_count == 1 {""} else {"s"},
2478 if supplied_arg_count == 1 {" was"} else {"s were"});
2479 expected_arg_tys = &[];
2480 err_args(fcx.tcx(), supplied_arg_count)
2483 debug!("check_argument_types: formal_tys={:?}",
2484 formal_tys.iter().map(|t| fcx.infcx().ty_to_string(*t)).collect::<Vec<String>>());
2486 // Check the arguments.
2487 // We do this in a pretty awful way: first we typecheck any arguments
2488 // that are not anonymous functions, then we typecheck the anonymous
2489 // functions. This is so that we have more information about the types
2490 // of arguments when we typecheck the functions. This isn't really the
2491 // right way to do this.
// Phase 2: two passes over the arguments -- `false` = non-closures first,
// `true` = closures second.
2492 let xs = [false, true];
2493 let mut any_diverges = false; // has any of the arguments diverged?
2494 let mut warned = false; // have we already warned about unreachable code?
2495 for check_blocks in &xs {
2496 let check_blocks = *check_blocks;
2497 debug!("check_blocks={}", check_blocks);
2499 // More awful hacks: before we check argument types, try to do
2500 // an "opportunistic" vtable resolution of any trait bounds on
2501 // the call. This helps coercions.
2503 fcx.select_new_obligations();
2506 // For variadic functions, we don't have a declared type for all of
2507 // the arguments hence we only do our usual type checking with
2508 // the arguments who's types we do know.
2509 let t = if variadic {
2511 } else if tuple_arguments == TupleArguments {
2516 for (i, arg) in args.iter().take(t).enumerate() {
// Any argument after a diverging one is unreachable; lint once.
2517 if any_diverges && !warned {
2521 .add_lint(lint::builtin::UNREACHABLE_CODE,
2524 "unreachable expression".to_string());
2527 let is_block = match arg.node {
2528 hir::ExprClosure(..) => true,
// Only check arguments matching the current pass (closure / non-closure).
2532 if is_block == check_blocks {
2533 debug!("checking the argument");
2534 let formal_ty = formal_tys[i];
2536 // The special-cased logic below has three functions:
2537 // 1. Provide as good of an expected type as possible.
2538 let expected = expected_arg_tys.get(i).map(|&ty| {
2539 Expectation::rvalue_hint(fcx.tcx(), ty)
2542 check_expr_with_unifier(fcx,
2544 expected.unwrap_or(ExpectHasType(formal_ty)),
2546 // 2. Coerce to the most detailed type that could be coerced
2547 // to, which is `expected_ty` if `rvalue_hint` returns an
2548 // `ExprHasType(expected_ty)`, or the `formal_ty` otherwise.
2549 let coerce_ty = expected.and_then(|e| e.only_has_type(fcx));
2550 demand::coerce(fcx, arg.span, coerce_ty.unwrap_or(formal_ty), &**arg);
2552 // 3. Relate the expected type and the formal one,
2553 // if the expected type was used for the coercion.
2554 coerce_ty.map(|ty| demand::suptype(fcx, arg.span, formal_ty, ty));
// Track divergence: the argument's recorded type may be a diverging
// type variable (e.g. from `panic!()`/`return`).
2558 if let Some(&arg_ty) = fcx.inh.tables.borrow().node_types.get(&arg.id) {
2559 any_diverges = any_diverges || fcx.infcx().type_var_diverges(arg_ty);
// If any argument diverged, the call expression itself is unreachable.
2562 if any_diverges && !warned {
2563 let parent = fcx.ccx.tcx.map.get_parent_node(args[0].id);
2567 .add_lint(lint::builtin::UNREACHABLE_CODE,
2570 "unreachable call".to_string());
2576 // We also need to make sure we at least write the ty of the other
2577 // arguments which we skipped above.
// Phase 3: the extra varargs beyond the declared parameters -- check them
// with no expectation and reject types that C would auto-promote.
2579 for arg in args.iter().skip(expected_arg_count) {
2580 check_expr(fcx, &**arg);
2582 // There are a few types which get autopromoted when passed via varargs
2583 // in C but we just error out instead and require explicit casts.
2584 let arg_ty = structurally_resolved_type(fcx, arg.span,
2585 fcx.expr_ty(&**arg));
2587 ty::TyFloat(ast::TyF32) => {
2588 fcx.type_error_message(arg.span,
2590 format!("can't pass an {} to variadic \
2591 function, cast to c_double", t)
2594 ty::TyInt(ast::TyI8) | ty::TyInt(ast::TyI16) | ty::TyBool => {
2595 fcx.type_error_message(arg.span, |t| {
2596 format!("can't pass {} to variadic \
2597 function, cast to c_int",
2601 ty::TyUint(ast::TyU8) | ty::TyUint(ast::TyU16) => {
2602 fcx.type_error_message(arg.span, |t| {
2603 format!("can't pass {} to variadic \
2604 function, cast to c_uint",
2614 // FIXME(#17596) Ty<'tcx> is incorrectly invariant w.r.t 'tcx.
// Produces `len` copies of the error type -- used as placeholder formal
// parameter types after an arity or callee error so checking can continue.
2615 fn err_args<'tcx>(tcx: &ty::ctxt<'tcx>, len: usize) -> Vec<Ty<'tcx>> {
2616 (0..len).map(|_| tcx.types.err).collect()
// Records the type of a call expression from the callee's output type:
// a converging fn yields its return type; a diverging fn (`-> !`) yields a
// fresh diverging type variable so unreachable-code analysis can see it.
2619 fn write_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2620 call_expr: &hir::Expr,
2621 output: ty::FnOutput<'tcx>) {
2622 fcx.write_ty(call_expr.id, match output {
2623 ty::FnConverging(output_ty) => output_ty,
2624 ty::FnDiverging => fcx.infcx().next_diverging_ty_var()
2628 // AST fragment checking
// Determines the type of a literal. Suffixed numeric literals map directly
// to their machine type; unsuffixed ones consult the expected type and fall
// back to a fresh int/float inference variable.
2629 fn check_lit<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2631 expected: Expectation<'tcx>)
2634 let tcx = fcx.ccx.tcx;
2637 ast::LitStr(..) => tcx.mk_static_str(),
// Byte-string literal: `&'static [u8; N]`.
2638 ast::LitByteStr(ref v) => {
2639 tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic),
2640 tcx.mk_array(tcx.types.u8, v.len()))
2642 ast::LitByte(_) => tcx.types.u8,
2643 ast::LitChar(_) => tcx.types.char,
2644 ast::LitInt(_, ast::SignedIntLit(t, _)) => tcx.mk_mach_int(t),
2645 ast::LitInt(_, ast::UnsignedIntLit(t)) => tcx.mk_mach_uint(t),
2646 ast::LitInt(_, ast::UnsuffixedIntLit(_)) => {
// Unsuffixed integer: adopt the expected type when it is integral
// (or one of the special contexts below), else an int variable.
2647 let opt_ty = expected.to_option(fcx).and_then(|ty| {
2649 ty::TyInt(_) | ty::TyUint(_) => Some(ty),
// NOTE(review): an integer literal expected to be `char` defaults to
// `u8`, and pointer/fn-pointer expectations default to `usize` --
// presumably for `b'x'`-adjacent and pointer-arithmetic contexts;
// confirm rationale against the full file's history.
2650 ty::TyChar => Some(tcx.types.u8),
2651 ty::TyRawPtr(..) => Some(tcx.types.usize),
2652 ty::TyBareFn(..) => Some(tcx.types.usize),
2656 opt_ty.unwrap_or_else(
2657 || tcx.mk_int_var(fcx.infcx().next_int_var_id()))
2659 ast::LitFloat(_, t) => tcx.mk_mach_float(t),
2660 ast::LitFloatUnsuffixed(_) => {
// Unsuffixed float: adopt an expected float type, else a float variable.
2661 let opt_ty = expected.to_option(fcx).and_then(|ty| {
2663 ty::TyFloat(_) => Some(ty),
2667 opt_ty.unwrap_or_else(
2668 || tcx.mk_float_var(fcx.infcx().next_float_var_id()))
2670 ast::LitBool(_) => tcx.types.bool
// Type-checks `expr` and then demands (via subtyping, no coercion) that its
// type is a subtype of `expected`. Contrast `check_expr_coercable_to_type`,
// which allows coercions.
2674 pub fn check_expr_has_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2675 expr: &'tcx hir::Expr,
2676 expected: Ty<'tcx>) {
2677 check_expr_with_unifier(
2678 fcx, expr, ExpectHasType(expected), NoPreference,
// The closure runs after the expression is checked; `suptype` enforces
// expr's actual type <: expected.
2679 || demand::suptype(fcx, expr.span, expected, fcx.expr_ty(expr)));
// Type-checks `expr` and then demands that its type is *coercible* to
// `expected` (unsizing, autoref adjustments, etc. -- see `demand::coerce`),
// rather than requiring an exact subtype relationship.
2682 fn check_expr_coercable_to_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2683 expr: &'tcx hir::Expr,
2684 expected: Ty<'tcx>) {
2685 check_expr_with_unifier(
2686 fcx, expr, ExpectHasType(expected), NoPreference,
2687 || demand::coerce(fcx, expr.span, expected, expr));
// Type-checks `expr` with `expected` as a hint only: the expectation guides
// inference but no subtype/coercion demand is issued afterwards.
2690 fn check_expr_with_hint<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, expr: &'tcx hir::Expr,
2691 expected: Ty<'tcx>) {
2692 check_expr_with_unifier(
2693 fcx, expr, ExpectHasType(expected), NoPreference,
// NOTE(review): the trailing no-op unifier closure (`|| ()`) and closing
// lines are elided from this listing.
// Type-checks `expr` under an arbitrary `Expectation`, with no lvalue
// preference and no post-check demand.
2697 fn check_expr_with_expectation<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2698 expr: &'tcx hir::Expr,
2699 expected: Expectation<'tcx>) {
2700 check_expr_with_unifier(
2701 fcx, expr, expected, NoPreference,
// NOTE(review): the trailing no-op unifier closure and closing lines are
// elided from this listing.
// Like `check_expr_with_expectation`, but also propagates an lvalue
// preference (used e.g. to prefer `DerefMut`/`IndexMut` when the result will
// be mutated).
2705 fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2706 expr: &'tcx hir::Expr,
2707 expected: Expectation<'tcx>,
2708 lvalue_pref: LvaluePreference)
2710 check_expr_with_unifier(fcx, expr, expected, lvalue_pref, || ())
// Type-checks `expr` with no expectation, no lvalue preference, and no
// post-check demand -- the simplest entry point.
2713 fn check_expr<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, expr: &'tcx hir::Expr) {
2714 check_expr_with_unifier(fcx, expr, NoExpectation, NoPreference, || ())
// Type-checks `expr` with no expectation but with an lvalue preference
// (mutable contexts such as assignment LHS or `&mut` borrows pass
// `PreferMutLvalue` here).
2717 fn check_expr_with_lvalue_pref<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, expr: &'tcx hir::Expr,
2718 lvalue_pref: LvaluePreference) {
2719 check_expr_with_unifier(fcx, expr, NoExpectation, lvalue_pref, || ())
2722 // determine the `self` type, using fresh variables for all variables
2723 // declared on the impl declaration e.g., `impl<A,B> for Vec<(A,B)>`
2724 // would return ($0, $1) where $0 and $1 are freshly instantiated type
2726 pub fn impl_self_ty<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2727 span: Span, // (potential) receiver for this impl
2729 -> TypeAndSubsts<'tcx> {
2730 let tcx = fcx.tcx();
2732 let ity = tcx.lookup_item_type(did);
// Pull the impl's declared type and region parameters, plus its raw
// (uninstantiated) self type, out of the item's type scheme.
2733 let (tps, rps, raw_ty) =
2734 (ity.generics.types.get_slice(subst::TypeSpace),
2735 ity.generics.regions.get_slice(subst::TypeSpace),
2738 debug!("impl_self_ty: tps={:?} rps={:?} raw_ty={:?}", tps, rps, raw_ty);
// Replace every declared region/type parameter with a fresh inference
// variable, then substitute those into the raw self type.
2740 let rps = fcx.inh.infcx.region_vars_for_defs(span, rps);
2741 let mut substs = subst::Substs::new(
2742 VecPerParamSpace::empty(),
2743 VecPerParamSpace::new(rps, Vec::new(), Vec::new()));
2744 fcx.inh.infcx.type_vars_for_defs(span, ParamSpace::TypeSpace, &mut substs, tps);
2745 let substd_ty = fcx.instantiate_type_scheme(span, &substs, &raw_ty);
// Return both the instantiated self type and the substitutions used, so
// callers can relate other items of the impl to the same variables.
2747 TypeAndSubsts { substs: substs, ty: substd_ty }
2750 /// Controls whether the arguments are tupled. This is used for the call
2753 /// Tupling means that all call-side arguments are packed into a tuple and
2754 /// passed as a single parameter. For example, if tupling is enabled, this
2757 /// fn f(x: (isize, isize))
2759 /// Can be called as:
2766 #[derive(Clone, Eq, PartialEq)]
2767 enum TupleArgumentsFlag {
// NOTE(review): the variants are elided from this listing; the uses above
// (`DontTupleArguments`, `TupleArguments`) show the two expected variants --
// confirm against the full file.
2772 /// Unifies the return type with the expected type early, for more coercions
2773 /// and forward type information on the argument expressions.
2774 fn expected_types_for_fn_args<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2776 expected_ret: Expectation<'tcx>,
2777 formal_ret: ty::FnOutput<'tcx>,
2778 formal_args: &[Ty<'tcx>])
// Only a definite `ExpectHasType`-style expectation (via `only_has_type`)
// and a converging formal return type can produce argument expectations.
2780 let expected_args = expected_ret.only_has_type(fcx).and_then(|ret_ty| {
2781 if let ty::FnConverging(formal_ret_ty) = formal_ret {
// The region constraints made by this speculative subtyping are rolled
// back if it fails, so a failed attempt leaves inference untouched.
2782 fcx.infcx().commit_regions_if_ok(|| {
2783 // Attempt to apply a subtyping relationship between the formal
2784 // return type (likely containing type variables if the function
2785 // is polymorphic) and the expected return type.
2786 // No argument expectations are produced if unification fails.
2787 let origin = TypeOrigin::Misc(call_span);
2788 let ures = fcx.infcx().sub_types(false, origin, formal_ret_ty, ret_ty);
2789 // FIXME(#15760) can't use try! here, FromError doesn't default
2790 // to identity so the resulting type is not constrained.
2791 if let Err(e) = ures {
2795 // Record all the argument types, with the substitutions
2796 // produced from the above subtyping unification.
2797 Ok(formal_args.iter().map(|ty| {
2798 fcx.infcx().resolve_type_vars_if_possible(ty)
// On any failure, fall back to "no expectations" rather than erroring:
// this pass is purely an inference aid.
2804 }).unwrap_or(vec![]);
2805 debug!("expected_types_for_fn_args(formal={:?} -> {:?}, expected={:?} -> {:?})",
2806 formal_args, formal_ret,
2807 expected_args, expected_ret);
2812 /// If an expression has any sub-expressions that result in a type error,
2813 /// inspecting that expression's type with `ty.references_error()` will return
2814 /// true. Likewise, if an expression is known to diverge, inspecting its
2815 /// type with `ty::type_is_bot` will return true (n.b.: since Rust is
2816 /// strict, _|_ can appear in the type of an expression that does not,
2817 /// itself, diverge: for example, fn() -> _|_.)
2818 /// Note that inspecting a type's structure *directly* may expose the fact
2819 /// that there are actually multiple representations for `TyError`, so avoid
2820 /// that when err needs to be handled differently.
2821 fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
2822 expr: &'tcx hir::Expr,
2823 expected: Expectation<'tcx>,
2824 lvalue_pref: LvaluePreference,
2828 debug!(">> typechecking: expr={:?} expected={:?}",
2831 // Checks a method call.
2832 fn check_method_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2833 expr: &'tcx hir::Expr,
2834 method_name: Spanned<ast::Name>,
2835 args: &'tcx [P<hir::Expr>],
2837 expected: Expectation<'tcx>,
2838 lvalue_pref: LvaluePreference) {
2839 let rcvr = &*args[0];
2840 check_expr_with_lvalue_pref(fcx, &*rcvr, lvalue_pref);
2842 // no need to check for bot/err -- callee does that
2843 let expr_t = structurally_resolved_type(fcx,
2845 fcx.expr_ty(&*rcvr));
2847 let tps = tps.iter().map(|ast_ty| fcx.to_ty(&**ast_ty)).collect::<Vec<_>>();
2848 let fn_ty = match method::lookup(fcx,
2856 let method_ty = method.ty;
2857 let method_call = MethodCall::expr(expr.id);
2858 fcx.inh.tables.borrow_mut().method_map.insert(method_call, method);
2862 method::report_error(fcx, method_name.span, expr_t,
2863 method_name.node, Some(rcvr), error);
2864 fcx.write_error(expr.id);
2869 // Call the generic checker.
2870 let ret_ty = check_method_argument_types(fcx,
2878 write_call(fcx, expr, ret_ty);
2881 // A generic function for checking the then and else in an if
2883 fn check_then_else<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2884 cond_expr: &'tcx hir::Expr,
2885 then_blk: &'tcx hir::Block,
2886 opt_else_expr: Option<&'tcx hir::Expr>,
2889 expected: Expectation<'tcx>) {
2890 check_expr_has_type(fcx, cond_expr, fcx.tcx().types.bool);
2892 let expected = expected.adjust_for_branches(fcx);
2893 check_block_with_expected(fcx, then_blk, expected);
2894 let then_ty = fcx.node_ty(then_blk.id);
2896 let branches_ty = match opt_else_expr {
2897 Some(ref else_expr) => {
2898 check_expr_with_expectation(fcx, &**else_expr, expected);
2899 let else_ty = fcx.expr_ty(&**else_expr);
2900 infer::common_supertype(fcx.infcx(),
2901 TypeOrigin::IfExpression(sp),
2907 infer::common_supertype(fcx.infcx(),
2908 TypeOrigin::IfExpressionWithNoElse(sp),
2915 let cond_ty = fcx.expr_ty(cond_expr);
2916 let if_ty = if cond_ty.references_error() {
2922 fcx.write_ty(id, if_ty);
2925 // Check field access expressions
2926 fn check_field<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
2927 expr: &'tcx hir::Expr,
2928 lvalue_pref: LvaluePreference,
2929 base: &'tcx hir::Expr,
2930 field: &Spanned<ast::Name>) {
2931 let tcx = fcx.ccx.tcx;
2932 check_expr_with_lvalue_pref(fcx, base, lvalue_pref);
2933 let expr_t = structurally_resolved_type(fcx, expr.span,
2935 // FIXME(eddyb) #12808 Integrate privacy into this auto-deref loop.
2936 let (_, autoderefs, field_ty) = autoderef(fcx,
2940 UnresolvedTypeAction::Error,
2944 ty::TyStruct(base_def, substs) => {
2945 debug!("struct named {:?}", base_t);
2946 base_def.struct_variant()
2947 .find_field_named(field.node)
2948 .map(|f| fcx.field_ty(expr.span, f, substs))
2955 fcx.write_ty(expr.id, field_ty);
2956 fcx.write_autoderef_adjustment(base.id, autoderefs);
2962 if method::exists(fcx, field.span, field.node, expr_t, expr.id) {
2963 fcx.type_error_message(
2966 format!("attempted to take value of method `{}` on type \
2967 `{}`", field.node, actual)
2971 tcx.sess.fileline_help(field.span,
2972 "maybe a `()` to call it is missing? \
2973 If not, try an anonymous function");
2975 fcx.type_error_message(
2978 format!("attempted access of field `{}` on \
2979 type `{}`, but no field with that \
2985 if let ty::TyStruct(def, _) = expr_t.sty {
2986 suggest_field_names(def.struct_variant(), field, tcx, vec![]);
2990 fcx.write_error(expr.id);
2993 // displays hints about the closest matches in field names
2994 fn suggest_field_names<'tcx>(variant: ty::VariantDef<'tcx>,
2995 field: &Spanned<ast::Name>,
2996 tcx: &ty::ctxt<'tcx>,
2997 skip : Vec<InternedString>) {
2998 let name = field.node.as_str();
2999 // only find fits with at least one matching letter
3000 let mut best_dist = name.len();
3001 let mut best = None;
3002 for elem in &variant.fields {
3003 let n = elem.name.as_str();
3004 // ignore already set fields
3005 if skip.iter().any(|x| *x == n) {
3008 // ignore private fields from non-local crates
3009 if variant.did.krate != LOCAL_CRATE && elem.vis != Visibility::Public {
3012 let dist = lev_distance(&n, &name);
3013 if dist < best_dist {
3018 if let Some(n) = best {
3019 tcx.sess.span_help(field.span,
3020 &format!("did you mean `{}`?", n));
3024 // Check tuple index expressions
3025 fn check_tup_field<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
3026 expr: &'tcx hir::Expr,
3027 lvalue_pref: LvaluePreference,
3028 base: &'tcx hir::Expr,
3029 idx: codemap::Spanned<usize>) {
3030 check_expr_with_lvalue_pref(fcx, base, lvalue_pref);
3031 let expr_t = structurally_resolved_type(fcx, expr.span,
3033 let mut tuple_like = false;
3034 // FIXME(eddyb) #12808 Integrate privacy into this auto-deref loop.
3035 let (_, autoderefs, field_ty) = autoderef(fcx,
3039 UnresolvedTypeAction::Error,
3043 ty::TyStruct(base_def, substs) => {
3044 tuple_like = base_def.struct_variant().is_tuple_struct();
3046 debug!("tuple struct named {:?}", base_t);
3047 base_def.struct_variant()
3050 .map(|f| fcx.field_ty(expr.span, f, substs))
3055 ty::TyTuple(ref v) => {
3057 if idx.node < v.len() { Some(v[idx.node]) } else { None }
3064 fcx.write_ty(expr.id, field_ty);
3065 fcx.write_autoderef_adjustment(base.id, autoderefs);
3070 fcx.type_error_message(
3074 format!("attempted out-of-bounds tuple index `{}` on \
3079 format!("attempted tuple index `{}` on type `{}`, but the \
3080 type was not a tuple or tuple struct",
3087 fcx.write_error(expr.id);
3090 fn report_unknown_field<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
3092 variant: ty::VariantDef<'tcx>,
3094 skip_fields: &[hir::Field]) {
3095 fcx.type_error_message(
3097 |actual| if let ty::TyEnum(..) = ty.sty {
3098 format!("struct variant `{}::{}` has no field named `{}`",
3099 actual, variant.name.as_str(), field.name.node)
3101 format!("structure `{}` has no field named `{}`",
3102 actual, field.name.node)
3106 // prevent all specified fields from being suggested
3107 let skip_fields = skip_fields.iter().map(|ref x| x.name.node.as_str());
3108 suggest_field_names(variant, &field.name, fcx.tcx(), skip_fields.collect());
3111 fn check_expr_struct_fields<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
3114 variant: ty::VariantDef<'tcx>,
3115 ast_fields: &'tcx [hir::Field],
3116 check_completeness: bool) {
3117 let tcx = fcx.ccx.tcx;
3118 let substs = match adt_ty.sty {
3119 ty::TyStruct(_, substs) | ty::TyEnum(_, substs) => substs,
3120 _ => tcx.sess.span_bug(span, "non-ADT passed to check_expr_struct_fields")
3123 let mut remaining_fields = FnvHashMap();
3124 for field in &variant.fields {
3125 remaining_fields.insert(field.name, field);
3128 let mut error_happened = false;
3130 // Typecheck each field.
3131 for field in ast_fields {
3132 let expected_field_type;
3134 if let Some(v_field) = remaining_fields.remove(&field.name.node) {
3135 expected_field_type = fcx.field_ty(field.span, v_field, substs);
3137 error_happened = true;
3138 expected_field_type = tcx.types.err;
3139 if let Some(_) = variant.find_field_named(field.name.node) {
3140 span_err!(fcx.tcx().sess, field.name.span, E0062,
3141 "field `{}` specified more than once",
3144 report_unknown_field(fcx, adt_ty, variant, field, ast_fields);
3148 // Make sure to give a type to the field even if there's
3149 // an error, so we can continue typechecking
3150 check_expr_coercable_to_type(fcx, &*field.expr, expected_field_type);
3153 // Make sure the programmer specified all the fields.
3154 if check_completeness &&
3156 !remaining_fields.is_empty()
3158 span_err!(tcx.sess, span, E0063,
3159 "missing field{}: {}",
3160 if remaining_fields.len() == 1 {""} else {"s"},
3161 remaining_fields.keys()
3162 .map(|n| format!("`{}`", n))
3163 .collect::<Vec<_>>()
3169 fn check_struct_fields_on_error<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
3171 fields: &'tcx [hir::Field],
3172 base_expr: &'tcx Option<P<hir::Expr>>) {
3173 // Make sure to still write the types
3174 // otherwise we might ICE
3175 fcx.write_error(id);
3176 for field in fields {
3177 check_expr(fcx, &*field.expr);
3180 Some(ref base) => check_expr(fcx, &**base),
3185 fn check_expr_struct<'a, 'tcx>(fcx: &FnCtxt<'a,'tcx>,
3188 fields: &'tcx [hir::Field],
3189 base_expr: &'tcx Option<P<hir::Expr>>)
3191 let tcx = fcx.tcx();
3193 // Find the relevant variant
3194 let def = lookup_full_def(tcx, path.span, expr.id);
3195 let (adt, variant) = match fcx.def_struct_variant(def, path.span) {
3196 Some((adt, variant)) => (adt, variant),
3198 span_err!(fcx.tcx().sess, path.span, E0071,
3199 "`{}` does not name a structure",
3200 pprust::path_to_string(path));
3201 check_struct_fields_on_error(fcx, expr.id, fields, base_expr);
3206 let expr_ty = fcx.instantiate_type(def.def_id(), path);
3207 fcx.write_ty(expr.id, expr_ty);
3209 check_expr_struct_fields(fcx, expr_ty, expr.span, variant, fields,
3210 base_expr.is_none());
3212 if let &Some(ref base_expr) = base_expr {
3213 check_expr_has_type(fcx, base_expr, expr_ty);
3214 if adt.adt_kind() == ty::AdtKind::Enum {
3215 span_err!(tcx.sess, base_expr.span, E0436,
3216 "functional record update syntax requires a struct");
3221 type ExprCheckerWithTy = fn(&FnCtxt, &hir::Expr, Ty);
3223 let tcx = fcx.ccx.tcx;
3226 hir::ExprBox(ref subexpr) => {
3227 let expected_inner = expected.to_option(fcx).map_or(NoExpectation, |ty| {
3229 ty::TyBox(ty) => Expectation::rvalue_hint(tcx, ty),
3233 check_expr_with_expectation(fcx, subexpr, expected_inner);
3234 let referent_ty = fcx.expr_ty(&**subexpr);
3235 fcx.write_ty(id, tcx.mk_box(referent_ty));
3238 hir::ExprLit(ref lit) => {
3239 let typ = check_lit(fcx, &**lit, expected);
3240 fcx.write_ty(id, typ);
3242 hir::ExprBinary(op, ref lhs, ref rhs) => {
3243 op::check_binop(fcx, expr, op, lhs, rhs);
3245 hir::ExprAssignOp(op, ref lhs, ref rhs) => {
3246 op::check_binop_assign(fcx, expr, op, lhs, rhs);
3248 hir::ExprUnary(unop, ref oprnd) => {
3249 let expected_inner = match unop {
3250 hir::UnNot | hir::UnNeg => {
3257 let lvalue_pref = match unop {
3258 hir::UnDeref => lvalue_pref,
3261 check_expr_with_expectation_and_lvalue_pref(
3262 fcx, &**oprnd, expected_inner, lvalue_pref);
3263 let mut oprnd_t = fcx.expr_ty(&**oprnd);
3265 if !oprnd_t.references_error() {
3268 oprnd_t = structurally_resolved_type(fcx, expr.span, oprnd_t);
3269 oprnd_t = match oprnd_t.builtin_deref(true, NoPreference) {
3271 None => match try_overloaded_deref(fcx, expr.span,
3272 Some(MethodCall::expr(expr.id)),
3273 Some(&**oprnd), oprnd_t, lvalue_pref) {
3276 fcx.type_error_message(expr.span, |actual| {
3277 format!("type `{}` cannot be \
3278 dereferenced", actual)
3286 oprnd_t = structurally_resolved_type(fcx, oprnd.span,
3288 if !(oprnd_t.is_integral() || oprnd_t.sty == ty::TyBool) {
3289 oprnd_t = op::check_user_unop(fcx, "!", "not",
3290 tcx.lang_items.not_trait(),
3291 expr, &**oprnd, oprnd_t, unop);
3295 oprnd_t = structurally_resolved_type(fcx, oprnd.span,
3297 if !(oprnd_t.is_integral() || oprnd_t.is_fp()) {
3298 oprnd_t = op::check_user_unop(fcx, "-", "neg",
3299 tcx.lang_items.neg_trait(),
3300 expr, &**oprnd, oprnd_t, unop);
3305 fcx.write_ty(id, oprnd_t);
3307 hir::ExprAddrOf(mutbl, ref oprnd) => {
3308 let hint = expected.only_has_type(fcx).map_or(NoExpectation, |ty| {
3310 ty::TyRef(_, ref mt) | ty::TyRawPtr(ref mt) => {
3311 if fcx.tcx().expr_is_lval(&**oprnd) {
3312 // Lvalues may legitimately have unsized types.
3313 // For example, dereferences of a fat pointer and
3314 // the last field of a struct can be unsized.
3315 ExpectHasType(mt.ty)
3317 Expectation::rvalue_hint(tcx, mt.ty)
3323 let lvalue_pref = LvaluePreference::from_mutbl(mutbl);
3324 check_expr_with_expectation_and_lvalue_pref(fcx,
3329 let tm = ty::TypeAndMut { ty: fcx.expr_ty(&**oprnd), mutbl: mutbl };
3330 let oprnd_t = if tm.ty.references_error() {
3333 // Note: at this point, we cannot say what the best lifetime
3334 // is to use for resulting pointer. We want to use the
3335 // shortest lifetime possible so as to avoid spurious borrowck
3336 // errors. Moreover, the longest lifetime will depend on the
3337 // precise details of the value whose address is being taken
3338 // (and how long it is valid), which we don't know yet until type
3339 // inference is complete.
3341 // Therefore, here we simply generate a region variable. The
3342 // region inferencer will then select the ultimate value.
3343 // Finally, borrowck is charged with guaranteeing that the
3344 // value whose address was taken can actually be made to live
3345 // as long as it needs to live.
3346 let region = fcx.infcx().next_region_var(infer::AddrOfRegion(expr.span));
3347 tcx.mk_ref(tcx.mk_region(region), tm)
3349 fcx.write_ty(id, oprnd_t);
3351 hir::ExprPath(ref maybe_qself, ref path) => {
3352 let opt_self_ty = maybe_qself.as_ref().map(|qself| {
3353 fcx.to_ty(&qself.ty)
3356 let path_res = if let Some(&d) = tcx.def_map.borrow().get(&id) {
3358 } else if let Some(hir::QSelf { position: 0, .. }) = *maybe_qself {
3359 // Create some fake resolution that can't possibly be a type.
3360 def::PathResolution {
3361 base_def: def::DefMod(tcx.map.local_def_id(ast::CRATE_NODE_ID)),
3362 last_private: LastMod(AllPublic),
3363 depth: path.segments.len()
3366 tcx.sess.span_bug(expr.span,
3367 &format!("unbound path {:?}", expr))
3370 if let Some((opt_ty, segments, def)) =
3371 resolve_ty_and_def_ufcs(fcx, path_res, opt_self_ty, path,
3372 expr.span, expr.id) {
3373 let (scheme, predicates) = type_scheme_and_predicates_for_def(fcx,
3376 instantiate_path(fcx,
3386 // We always require that the type provided as the value for
3387 // a type parameter outlives the moment of instantiation.
3388 fcx.opt_node_ty_substs(expr.id, |item_substs| {
3389 fcx.add_wf_bounds(&item_substs.substs, expr);
3392 hir::ExprInlineAsm(ref ia) => {
3393 for &(_, ref input) in &ia.inputs {
3394 check_expr(fcx, &**input);
3396 for &(_, ref out, _) in &ia.outputs {
3397 check_expr(fcx, &**out);
3401 hir::ExprBreak(_) => { fcx.write_ty(id, fcx.infcx().next_diverging_ty_var()); }
3402 hir::ExprAgain(_) => { fcx.write_ty(id, fcx.infcx().next_diverging_ty_var()); }
3403 hir::ExprRet(ref expr_opt) => {
3405 ty::FnConverging(result_type) => {
3408 if let Err(_) = fcx.mk_eqty(false, TypeOrigin::Misc(expr.span),
3409 result_type, fcx.tcx().mk_nil()) {
3410 span_err!(tcx.sess, expr.span, E0069,
3411 "`return;` in a function whose return type is \
3415 check_expr_coercable_to_type(fcx, &**e, result_type);
3419 ty::FnDiverging => {
3420 if let Some(ref e) = *expr_opt {
3421 check_expr(fcx, &**e);
3423 span_err!(tcx.sess, expr.span, E0166,
3424 "`return` in a function declared as diverging");
3427 fcx.write_ty(id, fcx.infcx().next_diverging_ty_var());
3429 hir::ExprAssign(ref lhs, ref rhs) => {
3430 check_expr_with_lvalue_pref(fcx, &**lhs, PreferMutLvalue);
3432 let tcx = fcx.tcx();
3433 if !tcx.expr_is_lval(&**lhs) {
3434 span_err!(tcx.sess, expr.span, E0070,
3435 "invalid left-hand side expression");
3438 let lhs_ty = fcx.expr_ty(&**lhs);
3439 check_expr_coercable_to_type(fcx, &**rhs, lhs_ty);
3440 let rhs_ty = fcx.expr_ty(&**rhs);
3442 fcx.require_expr_have_sized_type(&**lhs, traits::AssignmentLhsSized);
3444 if lhs_ty.references_error() || rhs_ty.references_error() {
3445 fcx.write_error(id);
3450 hir::ExprIf(ref cond, ref then_blk, ref opt_else_expr) => {
3451 check_then_else(fcx, &**cond, &**then_blk, opt_else_expr.as_ref().map(|e| &**e),
3452 id, expr.span, expected);
3454 hir::ExprWhile(ref cond, ref body, _) => {
3455 check_expr_has_type(fcx, &**cond, tcx.types.bool);
3456 check_block_no_value(fcx, &**body);
3457 let cond_ty = fcx.expr_ty(&**cond);
3458 let body_ty = fcx.node_ty(body.id);
3459 if cond_ty.references_error() || body_ty.references_error() {
3460 fcx.write_error(id);
3466 hir::ExprLoop(ref body, _) => {
3467 check_block_no_value(fcx, &**body);
3468 if !may_break(tcx, expr.id, &**body) {
3469 fcx.write_ty(id, fcx.infcx().next_diverging_ty_var());
3474 hir::ExprMatch(ref discrim, ref arms, match_src) => {
3475 _match::check_match(fcx, expr, &**discrim, arms, expected, match_src);
3477 hir::ExprClosure(capture, ref decl, ref body) => {
3478 closure::check_expr_closure(fcx, expr, capture, &**decl, &**body, expected);
3480 hir::ExprBlock(ref b) => {
3481 check_block_with_expected(fcx, &**b, expected);
3482 fcx.write_ty(id, fcx.node_ty(b.id));
3484 hir::ExprCall(ref callee, ref args) => {
3485 callee::check_call(fcx, expr, &**callee, &args[..], expected);
3487 // we must check that return type of called functions is WF:
3488 let ret_ty = fcx.expr_ty(expr);
3489 fcx.register_wf_obligation(ret_ty, expr.span, traits::MiscObligation);
3491 hir::ExprMethodCall(name, ref tps, ref args) => {
3492 check_method_call(fcx, expr, name, &args[..], &tps[..], expected, lvalue_pref);
3493 let arg_tys = args.iter().map(|a| fcx.expr_ty(&**a));
3494 let args_err = arg_tys.fold(false, |rest_err, a| rest_err || a.references_error());
3496 fcx.write_error(id);
3499 hir::ExprCast(ref e, ref t) => {
3500 if let hir::TyFixedLengthVec(_, ref count_expr) = t.node {
3501 check_expr_with_hint(fcx, &**count_expr, tcx.types.usize);
3504 // Find the type of `e`. Supply hints based on the type we are casting to,
3506 let t_cast = fcx.to_ty(t);
3507 let t_cast = structurally_resolved_type(fcx, expr.span, t_cast);
3508 check_expr_with_expectation(fcx, e, ExpectCastableToType(t_cast));
3509 let t_expr = fcx.expr_ty(e);
3511 // Eagerly check for some obvious errors.
3512 if t_expr.references_error() {
3513 fcx.write_error(id);
3514 } else if !fcx.type_is_known_to_be_sized(t_cast, expr.span) {
3515 report_cast_to_unsized_type(fcx, expr.span, t.span, e.span, t_cast, t_expr, id);
3517 // Write a type for the whole expression, assuming everything is going
3519 fcx.write_ty(id, t_cast);
3521 // Defer other checks until we're done type checking.
3522 let mut deferred_cast_checks = fcx.inh.deferred_cast_checks.borrow_mut();
3523 let cast_check = cast::CastCheck::new((**e).clone(), t_expr, t_cast, expr.span);
3524 deferred_cast_checks.push(cast_check);
3527 hir::ExprVec(ref args) => {
3528 let uty = expected.to_option(fcx).and_then(|uty| {
3530 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3535 let typ = match uty {
3538 check_expr_coercable_to_type(fcx, &**e, uty);
3543 let t: Ty = fcx.infcx().next_ty_var();
3545 check_expr_has_type(fcx, &**e, t);
3550 let typ = tcx.mk_array(typ, args.len());
3551 fcx.write_ty(id, typ);
3553 hir::ExprRepeat(ref element, ref count_expr) => {
3554 check_expr_has_type(fcx, &**count_expr, tcx.types.usize);
3555 let count = fcx.tcx().eval_repeat_count(&**count_expr);
3557 let uty = match expected {
3558 ExpectHasType(uty) => {
3560 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3567 let (element_ty, t) = match uty {
3569 check_expr_coercable_to_type(fcx, &**element, uty);
3573 let t: Ty = fcx.infcx().next_ty_var();
3574 check_expr_has_type(fcx, &**element, t);
3575 (fcx.expr_ty(&**element), t)
3580 // For [foo, ..n] where n > 1, `foo` must have
3582 fcx.require_type_meets(
3589 if element_ty.references_error() {
3590 fcx.write_error(id);
3592 let t = tcx.mk_array(t, count);
3593 fcx.write_ty(id, t);
3596 hir::ExprTup(ref elts) => {
3597 let flds = expected.only_has_type(fcx).and_then(|ty| {
3599 ty::TyTuple(ref flds) => Some(&flds[..]),
3603 let mut err_field = false;
3605 let elt_ts = elts.iter().enumerate().map(|(i, e)| {
3606 let t = match flds {
3607 Some(ref fs) if i < fs.len() => {
3609 check_expr_coercable_to_type(fcx, &**e, ety);
3613 check_expr_with_expectation(fcx, &**e, NoExpectation);
3617 err_field = err_field || t.references_error();
3621 fcx.write_error(id);
3623 let typ = tcx.mk_tup(elt_ts);
3624 fcx.write_ty(id, typ);
3627 hir::ExprStruct(ref path, ref fields, ref base_expr) => {
3628 check_expr_struct(fcx, expr, path, fields, base_expr);
3630 fcx.require_expr_have_sized_type(expr, traits::StructInitializerSized);
3632 hir::ExprField(ref base, ref field) => {
3633 check_field(fcx, expr, lvalue_pref, &**base, field);
3635 hir::ExprTupField(ref base, idx) => {
3636 check_tup_field(fcx, expr, lvalue_pref, &**base, idx);
3638 hir::ExprIndex(ref base, ref idx) => {
3639 check_expr_with_lvalue_pref(fcx, &**base, lvalue_pref);
3640 check_expr(fcx, &**idx);
3642 let base_t = fcx.expr_ty(&**base);
3643 let idx_t = fcx.expr_ty(&**idx);
3645 if base_t.references_error() {
3646 fcx.write_ty(id, base_t);
3647 } else if idx_t.references_error() {
3648 fcx.write_ty(id, idx_t);
3650 let base_t = structurally_resolved_type(fcx, expr.span, base_t);
3651 match lookup_indexing(fcx, expr, base, base_t, idx_t, lvalue_pref) {
3652 Some((index_ty, element_ty)) => {
3653 let idx_expr_ty = fcx.expr_ty(idx);
3654 demand::eqtype(fcx, expr.span, index_ty, idx_expr_ty);
3655 fcx.write_ty(id, element_ty);
3658 check_expr_has_type(fcx, &**idx, fcx.tcx().types.err);
3659 fcx.type_error_message(
3662 format!("cannot index a value of type `{}`",
3667 fcx.write_ty(id, fcx.tcx().types.err);
3672 hir::ExprRange(ref start, ref end) => {
3673 let t_start = start.as_ref().map(|e| {
3674 check_expr(fcx, &**e);
3677 let t_end = end.as_ref().map(|e| {
3678 check_expr(fcx, &**e);
3682 let idx_type = match (t_start, t_end) {
3683 (Some(ty), None) | (None, Some(ty)) => {
3686 (Some(t_start), Some(t_end)) if (t_start.references_error() ||
3687 t_end.references_error()) => {
3688 Some(fcx.tcx().types.err)
3690 (Some(t_start), Some(t_end)) => {
3691 Some(infer::common_supertype(fcx.infcx(),
3692 TypeOrigin::RangeExpression(expr.span),
3700 // Note that we don't check the type of start/end satisfy any
3701 // bounds because right now the range structs do not have any. If we add
3702 // some bounds, then we'll need to check `t_start` against them here.
3704 let range_type = match idx_type {
3705 Some(idx_type) if idx_type.references_error() => {
3709 // Find the did from the appropriate lang item.
3710 let did = match (start, end) {
3711 (&Some(_), &Some(_)) => tcx.lang_items.range_struct(),
3712 (&Some(_), &None) => tcx.lang_items.range_from_struct(),
3713 (&None, &Some(_)) => tcx.lang_items.range_to_struct(),
3715 tcx.sess.span_bug(expr.span, "full range should be dealt with above")
3719 if let Some(did) = did {
3720 let def = tcx.lookup_adt_def(did);
3721 let predicates = tcx.lookup_predicates(did);
3722 let substs = Substs::new_type(vec![idx_type], vec![]);
3723 let bounds = fcx.instantiate_bounds(expr.span, &substs, &predicates);
3724 fcx.add_obligations_for_parameters(
3725 traits::ObligationCause::new(expr.span,
3727 traits::ItemObligation(did)),
3730 tcx.mk_struct(def, tcx.mk_substs(substs))
3732 span_err!(tcx.sess, expr.span, E0236, "no lang item for range syntax");
3737 // Neither start nor end => RangeFull
3738 if let Some(did) = tcx.lang_items.range_full_struct() {
3740 tcx.lookup_adt_def(did),
3741 tcx.mk_substs(Substs::empty())
3744 span_err!(tcx.sess, expr.span, E0237, "no lang item for range syntax");
3750 fcx.write_ty(id, range_type);
3755 debug!("type of expr({}) {} is...", expr.id,
3756 pprust::expr_to_string(expr));
3757 debug!("... {:?}, expected is {:?}",
// Resolves the type and definition for a possibly-UFCS path (e.g. `<T>::item` or
// `Type::item`). If resolve already fully resolved the path (`depth == 0`) the
// existing resolution is returned as-is; otherwise the type portion of the path is
// converted first and the final segment is looked up as a UFCS associated item.
// NOTE(review): this extract has interior lines elided (gaps in the embedded line
// numbers), so the exact signature and some match arms are not fully visible here.
3764 pub fn resolve_ty_and_def_ufcs<'a, 'b, 'tcx>(fcx: &FnCtxt<'b, 'tcx>,
3765 path_res: def::PathResolution,
3766 opt_self_ty: Option<Ty<'tcx>>,
3767 path: &'a hir::Path,
3769 node_id: ast::NodeId)
3770 -> Option<(Option<Ty<'tcx>>,
3771 &'a [hir::PathSegment],
3775 // Associated constants can't depend on generic types.
// Helper: reports E0329 and records an error type when an associated const's
// type mentions type parameters or `Self`; returns whether it complained.
3776 fn have_disallowed_generic_consts<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
3780 node_id: ast::NodeId) -> bool {
3782 def::DefAssociatedConst(..) => {
3783 if ty.has_param_types() || ty.has_self_ty() {
3784 span_err!(fcx.sess(), span, E0329,
3785 "Associated consts cannot depend \
3786 on type parameters or Self.");
3787 fcx.write_error(node_id);
3796 // If fully resolved already, we don't have to do anything.
3797 if path_res.depth == 0 {
3798 if let Some(ty) = opt_self_ty {
3799 if have_disallowed_generic_consts(fcx, path_res.full_def(), ty,
3804 Some((opt_self_ty, &path.segments, path_res.base_def))
// Partially resolved: convert the type portion of the path, then resolve the
// trailing segment as a UFCS associated item on that type.
3806 let mut def = path_res.base_def;
3807 let ty_segments = path.segments.split_last().unwrap().1;
3808 let base_ty_end = path.segments.len() - path_res.depth;
3809 let ty = astconv::finish_resolving_def_to_ty(fcx, fcx, span,
3810 PathParamMode::Optional,
3813 &ty_segments[..base_ty_end],
3814 &ty_segments[base_ty_end..]);
3815 let item_segment = path.segments.last().unwrap();
3816 let item_name = item_segment.identifier.name;
3817 match method::resolve_ufcs(fcx, span, item_name, ty, node_id) {
3819 if have_disallowed_generic_consts(fcx, def, ty, span, node_id) {
3822 // Write back the new resolution.
3823 fcx.ccx.tcx.def_map.borrow_mut()
3824 .insert(node_id, def::PathResolution {
3826 last_private: path_res.last_private.or(lp),
3829 Some((Some(ty), slice::ref_slice(item_segment), def))
// On failure, report the method/const lookup error and record an error type
// so downstream checks do not cascade.
3832 method::report_error(fcx, span, ty,
3833 item_name, None, error);
3834 fcx.write_error(node_id);
3841 impl<'tcx> Expectation<'tcx> {
3842 /// Provide an expectation for an rvalue expression given an *optional*
3843 /// hint, which is not required for type safety (the resulting type might
3844 /// be checked higher up, as is the case with `&expr` and `box expr`), but
3845 /// is useful in determining the concrete type.
3847 /// The primary use case is where the expected type is a fat pointer,
3848 /// like `&[isize]`. For example, consider the following statement:
3850 /// let x: &[isize] = &[1, 2, 3];
3852 /// In this case, the expected type for the `&[1, 2, 3]` expression is
3853 /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
3854 /// expectation `ExpectHasType([isize])`, that would be too strong --
3855 /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
3856 /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
3857 /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
3858 /// which still is useful, because it informs integer literals and the like.
3859 /// See the test case `test/run-pass/coerce-expect-unsized.rs` and #20169
3860 /// for examples of where this comes up.
3861 fn rvalue_hint(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> {
// If the type's "struct tail" is unsized (slice or trait object), only hint
// rather than demand the type; otherwise the expectation is exact.
3862 match tcx.struct_tail(ty).sty {
3863 ty::TySlice(_) | ty::TyTrait(..) => {
3864 ExpectRvalueLikeUnsized(ty)
3866 _ => ExpectHasType(ty)
3870 // Resolves `expected` by a single level if it is a variable. If
3871 // there is no expected type or resolution is not possible (e.g.,
3872 // no constraints yet present), just returns `None`.
3873 fn resolve<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> {
// Each carrying variant is rebuilt with its type resolved as far as current
// inference constraints allow.
3878 ExpectCastableToType(t) => {
3879 ExpectCastableToType(
3880 fcx.infcx().resolve_type_vars_if_possible(&t))
3882 ExpectHasType(t) => {
3884 fcx.infcx().resolve_type_vars_if_possible(&t))
3886 ExpectRvalueLikeUnsized(t) => {
3887 ExpectRvalueLikeUnsized(
3888 fcx.infcx().resolve_type_vars_if_possible(&t))
// Extracts the hinted type from any carrying variant, after resolving
// inference variables; `NoExpectation` yields `None`.
3893 fn to_option<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> {
3894 match self.resolve(fcx) {
3895 NoExpectation => None,
3896 ExpectCastableToType(ty) |
3898 ExpectRvalueLikeUnsized(ty) => Some(ty),
// Like `to_option`, but only yields a type for the strict `ExpectHasType`
// variant — weaker hints are discarded.
3902 fn only_has_type<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> {
3903 match self.resolve(fcx) {
3904 ExpectHasType(ty) => Some(ty),
// Type-checks the initializer expression of a `let` declaration against the
// declared local's type. When the pattern contains a `ref` binding, the
// initializer type must be *equal* to the local type (no coercion); otherwise
// the initializer merely needs to be coercible to it.
3910 pub fn check_decl_initializer<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
3911 local: &'tcx hir::Local,
3912 init: &'tcx hir::Expr)
3914 let ref_bindings = fcx.tcx().pat_contains_ref_binding(&local.pat);
3916 let local_ty = fcx.local_ty(init.span, local.id);
3917 if let Some(m) = ref_bindings {
3918 // Somewhat subtle: if we have a `ref` binding in the pattern,
3919 // we want to avoid introducing coercions for the RHS. This is
3920 // both because it helps preserve sanity and, in the case of
3921 // ref mut, for soundness (issue #23116). In particular, in
3922 // the latter case, we need to be clear that the type of the
3923 // referent for the reference that results is *equal to* the
3924 // type of the lvalue it is referencing, and not some
3925 // supertype thereof.
3926 check_expr_with_lvalue_pref(fcx, init, LvaluePreference::from_mutbl(m));
3927 let init_ty = fcx.expr_ty(init);
3928 demand::eqtype(fcx, init.span, init_ty, local_ty);
// No ref bindings: a normal coercion check suffices.
3930 check_expr_coercable_to_type(fcx, init, local_ty)
// Type-checks a whole `let` declaration: records the local's type, checks the
// optional initializer, then checks the binding pattern. Errors found in either
// the initializer or the pattern are propagated onto the local's node type so
// later passes see them.
3934 pub fn check_decl_local<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, local: &'tcx hir::Local) {
3935 let tcx = fcx.ccx.tcx;
3937 let t = fcx.local_ty(local.span, local.id);
3938 fcx.write_ty(local.id, t);
3940 if let Some(ref init) = local.init {
3941 check_decl_initializer(fcx, local, &**init);
3942 let init_ty = fcx.expr_ty(&**init);
3943 if init_ty.references_error() {
// Propagate the error type so the local does not produce cascades.
3944 fcx.write_ty(local.id, init_ty);
3948 let pcx = pat_ctxt {
3950 map: pat_id_map(&tcx.def_map, &*local.pat),
3952 _match::check_pat(&pcx, &*local.pat, t);
3953 let pat_ty = fcx.node_ty(local.pat.id);
3954 if pat_ty.references_error() {
3955 fcx.write_ty(local.id, pat_ty);
// Type-checks a single statement, tracking whether it diverges (`saw_bot`) or
// contains a type error (`saw_err`), and writes the statement node's type
// accordingly: diverging, error, or `()`.
// NOTE(review): the match scrutinee line and the branch conditions near the end
// are elided in this extract; the visible writes show the three outcomes.
3959 pub fn check_stmt<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, stmt: &'tcx hir::Stmt) {
3961 let mut saw_bot = false;
3962 let mut saw_err = false;
3964 hir::StmtDecl(ref decl, id) => {
3967 hir::DeclLocal(ref l) => {
3968 check_decl_local(fcx, &**l);
3969 let l_t = fcx.node_ty(l.id);
3970 saw_bot = saw_bot || fcx.infcx().type_var_diverges(l_t);
3971 saw_err = saw_err || l_t.references_error();
3973 hir::DeclItem(_) => {/* ignore for now */ }
3976 hir::StmtExpr(ref expr, id) => {
3978 // Check with expected type of ()
3979 check_expr_has_type(fcx, &**expr, fcx.tcx().mk_nil());
3980 let expr_ty = fcx.expr_ty(&**expr);
3981 saw_bot = saw_bot || fcx.infcx().type_var_diverges(expr_ty);
3982 saw_err = saw_err || expr_ty.references_error();
// Semicolon statements may have any type; only divergence/errors matter.
3984 hir::StmtSemi(ref expr, id) => {
3986 check_expr(fcx, &**expr);
3987 let expr_ty = fcx.expr_ty(&**expr);
3988 saw_bot |= fcx.infcx().type_var_diverges(expr_ty);
3989 saw_err |= expr_ty.references_error();
3993 fcx.write_ty(node_id, fcx.infcx().next_diverging_ty_var());
3996 fcx.write_error(node_id);
3999 fcx.write_nil(node_id)
// Type-checks a block in a context where it must evaluate to `()` (e.g. a
// `while`/`loop` body). If the block's type contains errors, record an error
// for the block node; otherwise require `()` to be a supertype of the block's
// type (which also accepts a diverging block).
4003 pub fn check_block_no_value<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, blk: &'tcx hir::Block) {
4004 check_block_with_expected(fcx, blk, ExpectHasType(fcx.tcx().mk_nil()));
4005 let blkty = fcx.node_ty(blk.id);
4006 if blkty.references_error() {
4007 fcx.write_error(blk.id);
4009 let nilty = fcx.tcx().mk_nil();
4010 demand::suptype(fcx, blk.span, nilty, blkty);
// Type-checks a block with an expectation for its tail expression. Tracks the
// block's unsafety state while checking, checks every statement, emits the
// UNREACHABLE_CODE lint (once) for statements/expressions after a diverging
// statement, and writes the block's type: error if any statement erred,
// diverging if a statement diverged, otherwise the tail expression's type
// (or `()` when there is no tail expression).
4014 fn check_block_with_expected<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4015 blk: &'tcx hir::Block,
4016 expected: Expectation<'tcx>) {
// Push the block's unsafety state, keeping the previous one to restore below.
4018 let mut fcx_ps = fcx.ps.borrow_mut();
4019 let unsafety_state = fcx_ps.recurse(blk);
4020 replace(&mut *fcx_ps, unsafety_state)
4023 let mut warned = false;
4024 let mut any_diverges = false;
4025 let mut any_err = false;
4026 for s in &blk.stmts {
4028 let s_id = ::rustc_front::util::stmt_id(s);
4029 let s_ty = fcx.node_ty(s_id);
// Warn (at most once per block) about statements that follow a diverging
// statement; item declarations are exempt.
4030 if any_diverges && !warned && match s.node {
4031 hir::StmtDecl(ref decl, _) => {
4033 hir::DeclLocal(_) => true,
4037 hir::StmtExpr(_, _) | hir::StmtSemi(_, _) => true,
4042 .add_lint(lint::builtin::UNREACHABLE_CODE,
4045 "unreachable statement".to_string());
4048 any_diverges = any_diverges || fcx.infcx().type_var_diverges(s_ty);
4049 any_err = any_err || s_ty.references_error();
// No tail expression: the block's type is error, diverging, or `()`.
4052 None => if any_err {
4053 fcx.write_error(blk.id);
4054 } else if any_diverges {
4055 fcx.write_ty(blk.id, fcx.infcx().next_diverging_ty_var());
4057 fcx.write_nil(blk.id);
// Tail expression present: lint if it is unreachable, then check it against
// the expectation and use its type for the block.
4060 if any_diverges && !warned {
4064 .add_lint(lint::builtin::UNREACHABLE_CODE,
4067 "unreachable expression".to_string());
4069 let ety = match expected {
4070 ExpectHasType(ety) => {
4071 check_expr_coercable_to_type(fcx, &**e, ety);
4075 check_expr_with_expectation(fcx, &**e, expected);
4081 fcx.write_error(blk.id);
4082 } else if any_diverges {
4083 fcx.write_ty(blk.id, fcx.infcx().next_diverging_ty_var());
4085 fcx.write_ty(blk.id, ety);
// Restore the unsafety state saved at entry.
4090 *fcx.ps.borrow_mut() = prev;
4093 /// Checks a constant appearing in a type. At the moment this is just the
4094 /// length expression in a fixed-length vector, but someday it might be
4095 /// extended to type-level numeric literals.
4096 fn check_const_in_type<'a,'tcx>(ccx: &'a CrateCtxt<'a,'tcx>,
4097 expr: &'tcx hir::Expr,
4098 expected_type: Ty<'tcx>) {
// Constants in types are checked in a fresh, static (item-less) inference
// context rather than the enclosing function's context.
4099 let tables = RefCell::new(ty::Tables::empty());
4100 let inh = static_inherited_fields(ccx, &tables);
4101 let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(expected_type), expr.id);
4102 check_const_with_ty(&fcx, expr.span, expr, expected_type);
// Checks a constant item: builds a fresh static inference context, looks up the
// item's declared type, and delegates to `check_const_with_ty`.
// NOTE(review): the remaining parameter lines of the signature (span, expr, id)
// are elided in this extract.
4105 fn check_const<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
4109 let tables = RefCell::new(ty::Tables::empty());
4110 let inh = static_inherited_fields(ccx, &tables);
4111 let rty = ccx.tcx.node_id_to_type(id);
4112 let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(rty), e.id);
4113 let declty = fcx.ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(id)).ty;
4114 check_const_with_ty(&fcx, sp, e, declty);
// Checks a constant expression against a declared type, then runs the full
// post-check pipeline on it: obligation selection (with defaults), closure
// analysis, region checking, and writeback of inferred types.
4117 fn check_const_with_ty<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4121 // Gather locals in statics (because of block expressions).
4122 // This is technically unnecessary because locals in static items are forbidden,
4123 // but prevents type checking from blowing up before const checking can properly
4125 GatherLocalsVisitor { fcx: fcx }.visit_expr(e);
4127 check_expr_with_hint(fcx, e, declty);
4128 demand::coerce(fcx, e.span, declty, e);
4130 fcx.select_all_obligations_and_apply_defaults();
4131 upvar::closure_analyze_const(&fcx, e);
4132 fcx.select_obligations_where_possible();
4134 fcx.select_all_obligations_or_error();
4136 regionck::regionck_expr(fcx, e);
4137 writeback::resolve_type_vars_in_expr(fcx, e);
4140 /// Checks whether a type can be represented in memory. In particular, it
4141 /// identifies types that contain themselves without indirection through a
4142 /// pointer, which would mean their size is unbounded.
// Returns whether the item's type is representable; E0072 is reported for
// self-recursive types. (The return expressions for each arm are elided in
// this extract.)
4143 pub fn check_representable(tcx: &ty::ctxt,
4145 item_id: ast::NodeId,
4146 designation: &str) -> bool {
4147 let rty = tcx.node_id_to_type(item_id);
4149 // Check that it is possible to represent this type. This call identifies
4150 // (1) types that contain themselves and (2) types that contain a different
4151 // recursive type. It is only necessary to throw an error on those that
4152 // contain themselves. For case 2, there must be an inner type that will be
4153 // caught by case 1.
4154 match rty.is_representable(tcx, sp) {
4155 Representability::SelfRecursive => {
4156 span_err!(tcx.sess, sp, E0072, "invalid recursive {} type", designation);
4157 tcx.sess.fileline_help(
4158 sp, "wrap the inner value in a box to make it representable");
4161 Representability::Representable | Representability::ContainsRecursive => (),
// Validates a `#[simd]`-annotated struct: it must be a non-empty struct whose
// fields all have the same type, and that element type must be a machine type
// or a type parameter. Emits E0075/E0076/E0077 on violation.
4166 pub fn check_simd(tcx: &ty::ctxt, sp: Span, id: ast::NodeId) {
4167 let t = tcx.node_id_to_type(id);
4169 ty::TyStruct(def, substs) => {
4170 let fields = &def.struct_variant().fields;
4171 if fields.is_empty() {
4172 span_err!(tcx.sess, sp, E0075, "SIMD vector cannot be empty");
// Homogeneity: every field must have exactly the first field's type.
4175 let e = fields[0].ty(tcx, substs);
4176 if !fields.iter().all(|f| f.ty(tcx, substs) == e) {
4177 span_err!(tcx.sess, sp, E0076, "SIMD vector should be homogeneous");
4181 ty::TyParam(_) => { /* struct<T>(T, T, T, T) is ok */ }
4182 _ if e.is_machine() => { /* struct(u8, u8, u8, u8) is ok */ }
4184 span_err!(tcx.sess, sp, E0077,
4185 "SIMD vector element type should be machine type");
// Checks an enum definition: validates its `#[repr]` hint against the variant
// count (E0083/E0084), checks each explicit discriminant expression against the
// repr type, detects duplicate discriminant values (E0081) and values outside
// the specified repr type's range (E0082), and finally checks representability.
4194 pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
4196 vs: &'tcx [hir::Variant],
// Helper: does `disr` fit in the integer type named by the repr attribute?
// The round-trip cast (`disr as uN as Disr == disr`) detects truncation.
4199 fn disr_in_range(ccx: &CrateCtxt,
4201 disr: ty::Disr) -> bool {
4202 fn uint_in_range(ccx: &CrateCtxt, ty: ast::UintTy, disr: ty::Disr) -> bool {
4204 ast::TyU8 => disr as u8 as Disr == disr,
4205 ast::TyU16 => disr as u16 as Disr == disr,
4206 ast::TyU32 => disr as u32 as Disr == disr,
4207 ast::TyU64 => disr as u64 as Disr == disr,
// `usize` is checked as the target's concrete pointer-width type.
4208 ast::TyUs => uint_in_range(ccx, ccx.tcx.sess.target.uint_type, disr)
4211 fn int_in_range(ccx: &CrateCtxt, ty: ast::IntTy, disr: ty::Disr) -> bool {
4213 ast::TyI8 => disr as i8 as Disr == disr,
4214 ast::TyI16 => disr as i16 as Disr == disr,
4215 ast::TyI32 => disr as i32 as Disr == disr,
4216 ast::TyI64 => disr as i64 as Disr == disr,
4217 ast::TyIs => int_in_range(ccx, ccx.tcx.sess.target.int_type, disr)
4221 attr::UnsignedInt(ty) => uint_in_range(ccx, ty, disr),
4222 attr::SignedInt(ty) => int_in_range(ccx, ty, disr)
// Helper: checks discriminant expressions and values for every variant.
4226 fn do_check<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
4227 vs: &'tcx [hir::Variant],
4229 hint: attr::ReprAttr) {
4230 #![allow(trivial_numeric_casts)]
4232 let rty = ccx.tcx.node_id_to_type(id);
4233 let mut disr_vals: Vec<ty::Disr> = Vec::new();
// Discriminant expressions are constants: check them in a fresh static
// inference context against the enum's repr integer type.
4235 let tables = RefCell::new(ty::Tables::empty());
4236 let inh = static_inherited_fields(ccx, &tables);
4237 let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(rty), id);
4239 let (_, repr_type_ty) = ccx.tcx.enum_repr_type(Some(&hint));
4241 if let Some(ref e) = v.node.disr_expr {
4242 check_const_with_ty(&fcx, e.span, e, repr_type_ty);
4246 let def_id = ccx.tcx.map.local_def_id(id);
4248 let variants = &ccx.tcx.lookup_adt_def(def_id).variants;
4249 for (v, variant) in vs.iter().zip(variants.iter()) {
4250 let current_disr_val = variant.disr_val;
4252 // Check for duplicate discriminant values
4253 match disr_vals.iter().position(|&x| x == current_disr_val) {
4255 span_err!(ccx.tcx.sess, v.span, E0081,
4256 "discriminant value `{}` already exists", disr_vals[i]);
4257 let variant_i_node_id = ccx.tcx.map.as_local_node_id(variants[i].did).unwrap();
4258 span_note!(ccx.tcx.sess, ccx.tcx.map.span(variant_i_node_id),
4259 "conflicting discriminant here")
4263 // Check for unrepresentable discriminant values
4265 attr::ReprAny | attr::ReprExtern => (),
4266 attr::ReprInt(sp, ity) => {
4267 if !disr_in_range(ccx, ity, current_disr_val) {
4268 span_err!(ccx.tcx.sess, v.span, E0082,
4269 "discriminant value outside specified type");
4270 span_note!(ccx.tcx.sess, sp,
4271 "discriminant type specified here");
// ReprSimd / ReprPacked on an enum should have been rejected earlier;
// reaching here is a compiler bug.
4275 ccx.tcx.sess.bug("range_to_inttype: found ReprSimd on an enum");
4277 attr::ReprPacked => {
4278 ccx.tcx.sess.bug("range_to_inttype: found ReprPacked on an enum");
4281 disr_vals.push(current_disr_val);
4285 let def_id = ccx.tcx.map.local_def_id(id);
4286 let hint = *ccx.tcx.lookup_repr_hints(def_id).get(0).unwrap_or(&attr::ReprAny);
// A non-default repr requires at least two variants.
4288 if hint != attr::ReprAny && vs.len() <= 1 {
4290 span_err!(ccx.tcx.sess, sp, E0083,
4291 "unsupported representation for univariant enum");
4293 span_err!(ccx.tcx.sess, sp, E0084,
4294 "unsupported representation for zero-variant enum");
4298 do_check(ccx, vs, id, hint);
4300 check_representable(ccx.tcx, sp, id, "enum");
4303 // Returns the type parameter count and the type for the given definition.
// Locals and upvars have no generics or predicates; item-like value defs are
// looked up in the tcx; anything that is not a value is a compiler bug here.
4304 fn type_scheme_and_predicates_for_def<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4307 -> (TypeScheme<'tcx>, GenericPredicates<'tcx>) {
4309 def::DefLocal(_, nid) | def::DefUpvar(_, nid, _, _) => {
4310 let typ = fcx.local_ty(sp, nid);
4311 (ty::TypeScheme { generics: ty::Generics::empty(), ty: typ },
4312 ty::GenericPredicates::empty())
4314 def::DefFn(id, _) | def::DefMethod(id) |
4315 def::DefStatic(id, _) | def::DefVariant(_, id, _) |
4316 def::DefStruct(id) | def::DefConst(id) | def::DefAssociatedConst(id) => {
4317 (fcx.tcx().lookup_item_type(id), fcx.tcx().lookup_predicates(id))
// Non-value definitions (types, modules, type parameters, ...) should never
// reach this function; treat them as an internal compiler error.
4321 def::DefAssociatedTy(..) |
4323 def::DefTyParam(..) |
4325 def::DefForeignMod(..) |
4328 def::DefSelfTy(..) => {
4329 fcx.ccx.tcx.sess.span_bug(sp, &format!("expected value, found {:?}", defn));
4334 // Instantiates the given path, which must refer to an item with the given
4335 // number of type parameters and type.
// Assigns each path segment to a parameter space (Type/Fn) based on what kind
// of definition the path resolves to, collects the explicitly supplied type
// and region parameters into `substs`, fills in defaults or fresh inference
// variables for the rest, registers the item's bounds as obligations, and
// writes the substituted type and substs for `node_id`.
4336 pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4337 segments: &[hir::PathSegment],
4338 type_scheme: TypeScheme<'tcx>,
4339 type_predicates: &ty::GenericPredicates<'tcx>,
4340 opt_self_ty: Option<Ty<'tcx>>,
4343 node_id: ast::NodeId) {
4344 debug!("instantiate_path(path={:?}, def={:?}, node_id={}, type_scheme={:?})",
4350 // We need to extract the type parameters supplied by the user in
4351 // the path `path`. Due to the current setup, this is a bit of a
4352 // tricky-process; the problem is that resolve only tells us the
4353 // end-point of the path resolution, and not the intermediate steps.
4354 // Luckily, we can (at least for now) deduce the intermediate steps
4355 // just from the end-point.
4357 // There are basically four cases to consider:
4359 // 1. Reference to a *type*, such as a struct or enum:
4361 // mod a { struct Foo<T> { ... } }
4363 // Because we don't allow types to be declared within one
4364 // another, a path that leads to a type will always look like
4365 // `a::b::Foo<T>` where `a` and `b` are modules. This implies
4366 // that only the final segment can have type parameters, and
4367 // they are located in the TypeSpace.
4369 // *Note:* Generally speaking, references to types don't
4370 // actually pass through this function, but rather the
4371 // `ast_ty_to_ty` function in `astconv`. However, in the case
4372 // of struct patterns (and maybe literals) we do invoke
4373 // `instantiate_path` to get the general type of an instance of
4374 // a struct. (In these cases, there are actually no type
4375 // parameters permitted at present, but perhaps we will allow
4376 // them in the future.)
4378 // 1b. Reference to an enum variant or tuple-like struct:
4380 // struct foo<T>(...)
4381 // enum E<T> { foo(...) }
4383 // In these cases, the parameters are declared in the type
4386 // 2. Reference to a *fn item*:
4390 // In this case, the path will again always have the form
4391 // `a::b::foo::<T>` where only the final segment should have
4392 // type parameters. However, in this case, those parameters are
4393 // declared on a value, and hence are in the `FnSpace`.
4395 // 3. Reference to a *method*:
4397 // impl<A> SomeStruct<A> {
4401 // Here we can have a path like
4402 // `a::b::SomeStruct::<A>::foo::<B>`, in which case parameters
4403 // may appear in two places. The penultimate segment,
4404 // `SomeStruct::<A>`, contains parameters in TypeSpace, and the
4405 // final segment, `foo::<B>` contains parameters in fn space.
4407 // 4. Reference to an *associated const*:
4409 // impl<A> AnotherStruct<A> {
4410 // const FOO: B = BAR;
4413 // The path in this case will look like
4414 // `a::b::AnotherStruct::<A>::FOO`, so the penultimate segment
4415 // only will have parameters in TypeSpace.
4417 // The first step then is to categorize the segments appropriately.
4419 assert!(!segments.is_empty());
4421 let mut ufcs_associated = None;
4422 let mut segment_spaces: Vec<_>;
4424 // Case 1 and 1b. Reference to a *type* or *enum variant*.
4425 def::DefSelfTy(..) |
4426 def::DefStruct(..) |
4427 def::DefVariant(..) |
4429 def::DefAssociatedTy(..) |
4431 def::DefPrimTy(..) |
4432 def::DefTyParam(..) => {
4433 // Everything but the final segment should have no
4434 // parameters at all.
4435 segment_spaces = vec![None; segments.len() - 1];
4436 segment_spaces.push(Some(subst::TypeSpace));
4439 // Case 2. Reference to a top-level value.
4442 def::DefStatic(..) => {
4443 segment_spaces = vec![None; segments.len() - 1];
4444 segment_spaces.push(Some(subst::FnSpace));
4447 // Case 3. Reference to a method.
4448 def::DefMethod(def_id) => {
4449 let container = fcx.tcx().impl_or_trait_item(def_id).container();
4451 ty::TraitContainer(trait_did) => {
4452 callee::check_legal_trait_for_method_call(fcx.ccx, span, trait_did)
4454 ty::ImplContainer(_) => {}
4457 if segments.len() >= 2 {
4458 segment_spaces = vec![None; segments.len() - 2];
4459 segment_spaces.push(Some(subst::TypeSpace));
4460 segment_spaces.push(Some(subst::FnSpace));
4462 // `<T>::method` will end up here, and so can `T::method`.
4463 let self_ty = opt_self_ty.expect("UFCS sugared method missing Self");
4464 segment_spaces = vec![Some(subst::FnSpace)];
4465 ufcs_associated = Some((container, self_ty));
// Case 4. Reference to an associated const.
4469 def::DefAssociatedConst(def_id) => {
4470 let container = fcx.tcx().impl_or_trait_item(def_id).container();
4472 ty::TraitContainer(trait_did) => {
4473 callee::check_legal_trait_for_method_call(fcx.ccx, span, trait_did)
4475 ty::ImplContainer(_) => {}
4478 if segments.len() >= 2 {
4479 segment_spaces = vec![None; segments.len() - 2];
4480 segment_spaces.push(Some(subst::TypeSpace));
4481 segment_spaces.push(None);
4483 // `<T>::CONST` will end up here, and so can `T::CONST`.
4484 let self_ty = opt_self_ty.expect("UFCS sugared const missing Self");
4485 segment_spaces = vec![None];
4486 ufcs_associated = Some((container, self_ty));
4490 // Other cases. Various nonsense that really shouldn't show up
4491 // here. If they do, an error will have been reported
4492 // elsewhere. (I hope)
4494 def::DefForeignMod(..) |
4498 def::DefUpvar(..) => {
4499 segment_spaces = vec![None; segments.len()];
4502 assert_eq!(segment_spaces.len(), segments.len());
4504 // In `<T as Trait<A, B>>::method`, `A` and `B` are mandatory, but
4505 // `opt_self_ty` can also be Some for `Foo::method`, where Foo's
4506 // type parameters are not mandatory.
4507 let require_type_space = opt_self_ty.is_some() && ufcs_associated.is_none();
4509 debug!("segment_spaces={:?}", segment_spaces);
4511 // Next, examine the definition, and determine how many type
4512 // parameters we expect from each space.
4513 let type_defs = &type_scheme.generics.types;
4514 let region_defs = &type_scheme.generics.regions;
4516 // Now that we have categorized what space the parameters for each
4517 // segment belong to, let's sort out the parameters that the user
4518 // provided (if any) into their appropriate spaces. We'll also report
4519 // errors if type parameters are provided in an inappropriate place.
4520 let mut substs = Substs::empty();
4521 for (opt_space, segment) in segment_spaces.iter().zip(segments) {
// Segments with no assigned space may not carry type parameters at all.
4524 prohibit_type_params(fcx.tcx(), slice::ref_slice(segment));
4528 push_explicit_parameters_from_segment_to_substs(fcx,
// If a Self type was provided and the item declares a Self parameter,
// record it in the SelfSpace.
4538 if let Some(self_ty) = opt_self_ty {
4539 if type_defs.len(subst::SelfSpace) == 1 {
4540 substs.types.push(subst::SelfSpace, self_ty);
4544 // Now we have to compare the types that the user *actually*
4545 // provided against the types that were *expected*. If the user
4546 // did not provide any types, then we want to substitute inference
4547 // variables. If the user provided some types, we may still need
4548 // to add defaults. If the user provided *too many* types, that's
4550 for &space in &[subst::SelfSpace, subst::TypeSpace, subst::FnSpace] {
4551 adjust_type_parameters(fcx, span, space, type_defs,
4552 require_type_space, &mut substs);
4553 assert_eq!(substs.types.len(space), type_defs.len(space));
4555 adjust_region_parameters(fcx, span, space, region_defs, &mut substs);
4556 assert_eq!(substs.regions().len(space), region_defs.len(space));
4559 // The things we are substituting into the type should not contain
4560 // escaping late-bound regions, and nor should the base type scheme.
4561 assert!(!substs.has_regions_escaping_depth(0));
4562 assert!(!type_scheme.has_escaping_regions());
4564 // Add all the obligations that are required, substituting and
4565 // normalized appropriately.
4566 let bounds = fcx.instantiate_bounds(span, &substs, &type_predicates);
4567 fcx.add_obligations_for_parameters(
4568 traits::ObligationCause::new(span, fcx.body_id, traits::ItemObligation(def.def_id())),
4571 // Substitute the values for the type parameters into the type of
4572 // the referenced item.
4573 let ty_substituted = fcx.instantiate_type_scheme(span, &substs, &type_scheme.ty);
4576 if let Some((ty::ImplContainer(impl_def_id), self_ty)) = ufcs_associated {
4577 // In the case of `Foo<T>::method` and `<Foo<T>>::method`, if `method`
4578 // is inherent, there is no `Self` parameter, instead, the impl needs
4579 // type parameters, which we can infer by unifying the provided `Self`
4580 // with the substituted impl type.
4581 let impl_scheme = fcx.tcx().lookup_item_type(impl_def_id);
4582 assert_eq!(substs.types.len(subst::TypeSpace),
4583 impl_scheme.generics.types.len(subst::TypeSpace));
4584 assert_eq!(substs.regions().len(subst::TypeSpace),
4585 impl_scheme.generics.regions.len(subst::TypeSpace));
4587 let impl_ty = fcx.instantiate_type_scheme(span, &substs, &impl_scheme.ty);
// This unification was already checked by method resolution; failure here
// is an internal compiler error, not a user error.
4588 if fcx.mk_subty(false, TypeOrigin::Misc(span), self_ty, impl_ty).is_err() {
4589 fcx.tcx().sess.span_bug(span,
4591 "instantiate_path: (UFCS) {:?} was a subtype of {:?} but now is not?",
4597 debug!("instantiate_path: type of {:?} is {:?}",
4600 fcx.write_ty(node_id, ty_substituted);
4601 fcx.write_substs(node_id, ty::ItemSubsts { substs: substs });
4604 /// Finds the parameters that the user provided and adds them to `substs`. If too many
4605 /// parameters are provided, then reports an error and clears the output vector.
4607 /// We clear the output vector because that will cause the `adjust_XXX_parameters()` later to
4608 /// use inference variables. This seems less likely to lead to derived errors.
4610 /// Note that we *do not* check for *too few* parameters here. Due to the presence of defaults
4611 /// etc that is more complicated. I wanted however to do the reporting of *too many* parameters
4612 /// here because we can easily use the precise span of the N+1'th parameter.
4613 fn push_explicit_parameters_from_segment_to_substs<'a, 'tcx>(
4614 fcx: &FnCtxt<'a, 'tcx>,
4615 space: subst::ParamSpace,
4617 type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4618 region_defs: &VecPerParamSpace<ty::RegionParameterDef>,
4619 segment: &hir::PathSegment,
4620 substs: &mut Substs<'tcx>)
// Dispatch on the syntactic form of the segment's parameter list.
4622 match segment.parameters {
4623 hir::AngleBracketedParameters(ref data) => {
// `Foo<A, B, 'c>` form: lower types and lifetimes directly.
4624 push_explicit_angle_bracketed_parameters_from_segment_to_substs(
4625 fcx, space, type_defs, region_defs, data, substs);
4628 hir::ParenthesizedParameters(ref data) => {
// `Foo(A, B) -> C` sugar is only legal on traits (the `Fn` family);
// report E0238, but still lower the parameters so downstream code
// has something sensible to work with instead of an empty substs.
4629 span_err!(fcx.tcx().sess, span, E0238,
4630 "parenthesized parameters may only be used with a trait");
4631 push_explicit_parenthesized_parameters_from_segment_to_substs(
4632 fcx, space, span, type_defs, data, substs);
/// Lowers an angle-bracketed parameter list (`Foo<A, B, 'c>`) into `substs`
/// for the given `space`: each explicit type argument is converted with
/// `fcx.to_ty` and each lifetime with `ast_region_to_region`. On the first
/// *excess* argument (index == declared count) an error is reported (E0087
/// for types, E0088 for lifetimes) and the partial list is cleared so that
/// the later `adjust_*_parameters()` pass falls back to inference variables.
4637 fn push_explicit_angle_bracketed_parameters_from_segment_to_substs<'a, 'tcx>(
4638 fcx: &FnCtxt<'a, 'tcx>,
4639 space: subst::ParamSpace,
4640 type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4641 region_defs: &VecPerParamSpace<ty::RegionParameterDef>,
4642 data: &hir::AngleBracketedParameterData,
4643 substs: &mut Substs<'tcx>)
4646 let type_count = type_defs.len(space);
// Caller must not have pushed any types for this space yet.
4647 assert_eq!(substs.types.len(space), 0);
4648 for (i, typ) in data.types.iter().enumerate() {
4649 let t = fcx.to_ty(&**typ);
4651 substs.types.push(space, t);
// `i == type_count` fires exactly once (on the N+1'th argument),
// so the "too many" error is reported only a single time and with
// the precise span of the first excess parameter.
4652 } else if i == type_count {
4653 span_err!(fcx.tcx().sess, typ.span, E0087,
4654 "too many type parameters provided: \
4655 expected at most {} parameter{}, \
4656 found {} parameter{}",
4658 if type_count == 1 {""} else {"s"},
4660 if data.types.len() == 1 {""} else {"s"});
// Discard the partial list; adjust_type_parameters() will then
// substitute inference variables instead (fewer derived errors).
4661 substs.types.truncate(space, 0);
// Associated-type bindings (`Item = T`) are only meaningful in type
// paths, not expression paths.
4667 if !data.bindings.is_empty() {
4668 span_err!(fcx.tcx().sess, data.bindings[0].span, E0182,
4669 "unexpected binding of associated item in expression path \
4670 (only allowed in type paths)");
// Same scheme as above, but for explicit lifetime arguments.
4674 let region_count = region_defs.len(space);
4675 assert_eq!(substs.regions().len(space), 0);
4676 for (i, lifetime) in data.lifetimes.iter().enumerate() {
4677 let r = ast_region_to_region(fcx.tcx(), lifetime);
4678 if i < region_count {
4679 substs.mut_regions().push(space, r);
4680 } else if i == region_count {
4681 span_err!(fcx.tcx().sess, lifetime.span, E0088,
4682 "too many lifetime parameters provided: \
4683 expected {} parameter{}, found {} parameter{}",
4685 if region_count == 1 {""} else {"s"},
4686 data.lifetimes.len(),
4687 if data.lifetimes.len() == 1 {""} else {"s"});
// Clear the partial region list so inference variables are used.
4688 substs.mut_regions().truncate(space, 0);
4696 /// `push_explicit_angle_bracketed_parameters_from_segment_to_substs`,
4697 /// but intended for `Foo(A,B) -> C` form. This expands to
4698 /// roughly the same thing as `Foo<(A,B),C>`. One important
4699 /// difference has to do with the treatment of anonymous
4700 /// regions, which are translated into bound regions (NYI).
4701 fn push_explicit_parenthesized_parameters_from_segment_to_substs<'a, 'tcx>(
4702 fcx: &FnCtxt<'a, 'tcx>,
4703 space: subst::ParamSpace,
4705 type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4706 data: &hir::ParenthesizedParameterData,
4707 substs: &mut Substs<'tcx>)
4709 let type_count = type_defs.len(space);
// The sugar always supplies exactly two type parameters (the input
// tuple and the output); if the definition expects fewer, error out.
4711 span_err!(fcx.tcx().sess, span, E0167,
4712 "parenthesized form always supplies 2 type parameters, \
4713 but only {} parameter(s) were expected",
// The `(A, B)` inputs become a single tuple type: the first parameter.
4717 let input_tys: Vec<Ty> =
4718 data.inputs.iter().map(|ty| fcx.to_ty(&**ty)).collect();
4720 let tuple_ty = fcx.tcx().mk_tup(input_tys);
// Guard against pushing past the declared count (E0167 was already
// reported above in that case).
4722 if type_count >= 1 {
4723 substs.types.push(space, tuple_ty);
// The `-> C` output becomes the second parameter; an omitted output
// defaults to the unit type `()`.
4726 let output_ty: Option<Ty> =
4727 data.output.as_ref().map(|ty| fcx.to_ty(&**ty));
4730 output_ty.unwrap_or(fcx.tcx().mk_nil());
4732 if type_count >= 2 {
4733 substs.types.push(space, output_ty);
/// Brings the explicitly-provided type parameters in `substs[space]` up to
/// the declared count for that space:
///
/// - nothing provided (and the type space is not required): replace with
///   fresh inference variables for every declared parameter;
/// - fewer than the number of *non-defaulted* parameters: report E0089 and
///   fill the space with `err` types to suppress derived errors;
/// - otherwise: append the defaults for any trailing omitted parameters,
///   substituting each default against the substitutions built so far
///   (defaults may refer to earlier parameters).
4737 fn adjust_type_parameters<'a, 'tcx>(
4738 fcx: &FnCtxt<'a, 'tcx>,
4741 defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4742 require_type_space: bool,
4743 substs: &mut Substs<'tcx>)
4745 let provided_len = substs.types.len(space);
4746 let desired = defs.get_slice(space);
// Required parameters are the leading run without defaults; everything
// after the first default is optional.
4747 let required_len = desired.iter()
4748 .take_while(|d| d.default.is_none())
4751 debug!("adjust_type_parameters(space={:?}, \
4760 // Enforced by `push_explicit_parameters_from_segment_to_substs()`.
4761 assert!(provided_len <= desired.len());
4763 // Nothing specified at all: supply inference variables for
4765 if provided_len == 0 && !(require_type_space && space == subst::TypeSpace) {
4766 substs.types.replace(space, Vec::new());
4767 fcx.infcx().type_vars_for_defs(span, space, substs, &desired[..]);
4771 // Too few parameters specified: report an error and use Err
4773 if provided_len < required_len {
// "at least" is only printed when there are also optional (defaulted)
// parameters beyond the required ones.
4775 if desired.len() != required_len { "at least " } else { "" };
4776 span_err!(fcx.tcx().sess, span, E0089,
4777 "too few type parameters provided: expected {}{} parameter{}, \
4778 found {} parameter{}",
4779 qualifier, required_len,
4780 if required_len == 1 {""} else {"s"},
4782 if provided_len == 1 {""} else {"s"});
// Use `err` placeholders so later passes don't cascade errors.
4783 substs.types.replace(space, vec![fcx.tcx().types.err; desired.len()]);
4787 // Otherwise, add in any optional parameters that the user
4788 // omitted. The case of *too many* parameters is handled
4790 // push_explicit_parameters_from_segment_to_substs(). Note
4791 // that the *default* type are expressed in terms of all prior
4792 // parameters, so we have to substitute as we go with the
4793 // partial substitution that we have built up.
4794 for i in provided_len..desired.len() {
// unwrap is safe here: every index >= required_len has a default,
// and provided_len >= required_len on this path.
4795 let default = desired[i].default.unwrap();
4796 let default = default.subst_spanned(fcx.tcx(), substs, Some(span));
4797 substs.types.push(space, default);
4799 assert_eq!(substs.types.len(space), desired.len());
4801 debug!("Final substs: {:?}", substs);
/// Region counterpart of `adjust_type_parameters`: brings the explicitly-
/// provided lifetimes in `substs[space]` up to the declared count. Unlike
/// types, lifetimes have no defaults, so the cases are simply: none provided
/// (use inference variables), exactly right (done), or too few (report E0090
/// and fall back to inference variables).
4804 fn adjust_region_parameters(
4808 defs: &VecPerParamSpace<ty::RegionParameterDef>,
4809 substs: &mut Substs)
4811 let provided_len = substs.mut_regions().len(space);
4812 let desired = defs.get_slice(space);
4814 // Enforced by `push_explicit_parameters_from_segment_to_substs()`.
4815 assert!(provided_len <= desired.len());
4817 // If nothing was provided, just use inference variables.
4818 if provided_len == 0 {
4819 substs.mut_regions().replace(
4821 fcx.infcx().region_vars_for_defs(span, desired));
4825 // If just the right number were provided, everybody is happy.
4826 if provided_len == desired.len() {
4830 // Otherwise, too few were provided. Report an error and then
4831 // use inference variables.
4832 span_err!(fcx.tcx().sess, span, E0090,
4833 "too few lifetime parameters provided: expected {} parameter{}, \
4834 found {} parameter{}",
4836 if desired.len() == 1 {""} else {"s"},
4838 if provided_len == 1 {""} else {"s"});
// Recover by replacing the partial list with fresh region variables.
4840 substs.mut_regions().replace(
4842 fcx.infcx().region_vars_for_defs(span, desired));
/// Resolves `ty` as far as current inference allows; if it is still not
/// structurally known, invokes the fallback closure `f` to produce an
/// alternative type. If the alternative is itself an unresolved type
/// variable (or already erroneous), a "type must be known" error is
/// reported and `err` is forced; otherwise the original type is unified
/// with the alternative and the result returned.
4846 fn structurally_resolve_type_or_else<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
4850 where F: Fn() -> Ty<'tcx>
4852 let mut ty = fcx.resolve_type_vars_if_possible(ty);
// Still unknown at this point: ask the fallback for a concrete guess.
4855 let alternative = f();
// If even the fallback can't name a concrete type, give up with a
// user-facing error and poison the variable with `err` so downstream
// checks don't pile on derived errors.
4858 if alternative.is_ty_var() || alternative.references_error() {
4859 fcx.type_error_message(sp, |_actual| {
4860 "the type of this value must be known in this context".to_string()
4862 demand::suptype(fcx, sp, fcx.tcx().types.err, ty);
4863 ty = fcx.tcx().types.err;
// Fallback produced a usable type: record it via unification.
4865 demand::suptype(fcx, sp, alternative, ty);
4873 // Resolves `typ` by a single level if `typ` is a type variable. If no
4874 // resolution is possible, then an error is reported.
// Thin wrapper over `structurally_resolve_type_or_else` whose fallback
// closure reports the error case rather than supplying a real type.
4875 pub fn structurally_resolved_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4880 structurally_resolve_type_or_else(fcx, sp, ty, || {
4885 // Returns true if b contains a break that can exit from b
// The result combines two independent queries: an unlabeled `break`
// found directly by the loop query, or a labeled `break` anywhere in
// the block whose label resolves to this loop's `id`.
4886 pub fn may_break(cx: &ty::ctxt, id: ast::NodeId, b: &hir::Block) -> bool {
4887 // First: is there an unlabeled break immediately
4889 (loop_query(&*b, |e| {
4891 hir::ExprBreak(None) => true,
4895 // Second: is there a labeled break with label
4896 // <id> nested anywhere inside the loop?
4897 (block_query(b, |e| {
4898 if let hir::ExprBreak(Some(_)) = e.node {
// Resolve the label's definition and compare it to this loop's id.
4899 lookup_full_def(cx, e.span, e.id) == def::DefLabel(id)
4906 pub fn check_bounds_are_used<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
4907 tps: &OwnedSlice<hir::TyParam>,
4909 debug!("check_bounds_are_used(n_tps={}, ty={:?})",
4912 // make a vector of booleans initially false, set to true when used
4913 if tps.is_empty() { return; }
4914 let mut tps_used = vec![false; tps.len()];
4916 for leaf_ty in ty.walk() {
4917 if let ty::TyParam(ParamTy {idx, ..}) = leaf_ty.sty {
4918 debug!("Found use of ty param num {}", idx);
4919 tps_used[idx as usize] = true;
4923 for (i, b) in tps_used.iter().enumerate() {
4925 span_err!(ccx.tcx.sess, tps[i].span, E0091,
4926 "type parameter `{}` is unused",