1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
15 Within the check phase of type check, we check each item one at a time
16 (bodies of function expressions are checked as part of the containing
17 function). Inference is used to supply types wherever they are unknown.
20 By far the most complex case is checking the body of a function. This
21 can be broken down into several distinct phases:
23 - gather: creates type variables to represent the type of each local
24 variable and pattern binding.
26 - main: the main pass does the lion's share of the work: it
27 determines the types of all expressions, resolves
28 methods, checks for most invalid conditions, and so forth. In
29 some cases, where a type is unknown, it may create a type or region
30 variable and use that as the type of an expression.
32 In the process of checking, various constraints will be placed on
33 these type variables through the subtyping relationships requested
34 through the `demand` module. The `infer` module is in charge
35 of resolving those constraints.
37 - regionck: after main is complete, the regionck pass goes over all
38 types looking for regions and making sure that they did not escape
39 into places they are not in scope. This may also influence the
40 final assignments of the various region variables if there is some
43 - vtable: find and records the impls to use for each trait bound that
44 appears on a type parameter.
46 - writeback: writes the final types within a function body, replacing
47 type variables with their final inferred types. These final types
48 are written into the `tcx.node_types` table, which should *never* contain
49 any reference to a type variable.
53 While type checking a function, the intermediate types for the
54 expressions, blocks, and so forth contained within the function are
55 stored in `fcx.node_types` and `fcx.item_substs`. These types
56 may contain unresolved type variables. After type checking is
57 complete, the functions in the writeback module are used to take the
58 types from this table, resolve them, and then write them into their
59 permanent home in the type context `ccx.tcx`.
61 This means that during inferencing you should use `fcx.write_ty()`
62 and `fcx.expr_ty()` / `fcx.node_ty()` to write/obtain the types of
63 nodes within the function.
65 The types of top-level items, which never contain unbound type
66 variables, are stored directly into the `tcx` tables.
68 n.b.: A type variable is not the same thing as a type parameter. A
69 type variable is rather an "instance" of a type parameter: that is,
70 given a generic function `fn foo<T>(t: T)`: while checking the
71 function `foo`, the type `ty_param(0)` refers to the type `T`, which
72 is treated in abstract. When `foo()` is called, however, `T` will be
73 substituted for a fresh type variable `N`. This variable will
74 eventually be resolved to some concrete type (which might itself be a type variable).
79 pub use self::Expectation::*;
80 pub use self::compare_method::{compare_impl_method, compare_const_impl};
81 use self::TupleArgumentsFlag::*;
83 use astconv::{self, ast_region_to_region, ast_ty_to_ty, AstConv, PathParamMode};
84 use check::_match::pat_ctxt;
85 use dep_graph::DepNode;
86 use fmt_macros::{Parser, Piece, Position};
87 use middle::astconv_util::prohibit_type_params;
88 use middle::cstore::LOCAL_CRATE;
89 use middle::def::{self, Def};
90 use middle::def_id::DefId;
92 use middle::infer::{TypeOrigin, TypeTrace, type_variable};
93 use middle::pat_util::{self, pat_id_map};
94 use middle::subst::{self, Subst, Substs, VecPerParamSpace, ParamSpace};
95 use middle::traits::{self, report_fulfillment_errors, ProjectionMode};
96 use middle::ty::{GenericPredicates, TypeScheme};
97 use middle::ty::{ParamTy, ParameterEnvironment};
98 use middle::ty::{LvaluePreference, NoPreference, PreferMutLvalue};
99 use middle::ty::{self, ToPolyTraitRef, Ty, TyCtxt};
100 use middle::ty::{MethodCall, MethodCallee};
101 use middle::ty::adjustment;
102 use middle::ty::error::TypeError;
103 use middle::ty::fold::{TypeFolder, TypeFoldable};
104 use middle::ty::relate::TypeRelation;
105 use middle::ty::util::{Representability, IntTypeExt};
106 use require_c_abi_if_variadic;
107 use rscope::{ElisionFailureInfo, RegionScope};
108 use session::{Session, CompileResult};
109 use {CrateCtxt, lookup_full_def};
112 use util::common::{block_query, ErrorReported, indenter, loop_query};
113 use util::nodemap::{DefIdMap, FnvHashMap, NodeMap};
115 use std::cell::{Cell, Ref, RefCell};
116 use std::collections::{HashSet};
117 use std::mem::replace;
118 use syntax::abi::Abi;
121 use syntax::attr::AttrMetaMethods;
122 use syntax::codemap::{self, Span, Spanned};
123 use syntax::errors::DiagnosticBuilder;
124 use syntax::parse::token::{self, InternedString, special_idents};
126 use syntax::util::lev_distance::find_best_match_for_name;
128 use rustc_front::intravisit::{self, Visitor};
129 use rustc_front::hir;
130 use rustc_front::hir::{Visibility, PatKind};
131 use rustc_front::print::pprust;
132 use rustc_back::slice;
151 /// closures defined within the function. For example:
154 /// bar(move|| { ... })
157 /// Here, the function `foo()` and the closure passed to
158 /// `bar()` will each have their own `FnCtxt`, but they will
159 /// share the inherited fields.
160 pub struct Inherited<'a, 'tcx: 'a> {
// State shared between a function and the closures defined within it
// (per the doc comment above); `FnCtxt` below holds an `&'a Inherited`.
// NOTE(review): this listing is an excerpt — the struct's closing brace
// and possibly some fields are not visible here.
161 infcx: infer::InferCtxt<'a, 'tcx>,
// Types of local variables/pattern bindings; populated by
// `GatherLocalsVisitor::assign` (see below).
162 locals: RefCell<NodeMap<Ty<'tcx>>>,
// Trait obligations registered during checking; presumably drained by
// the `fcx.select_*` calls in `check_bare_fn` — TODO confirm.
164 fulfillment_cx: RefCell<traits::FulfillmentContext<'tcx>>,
// Side tables written during checking (e.g. `liberated_fn_sigs` is
// inserted into in `check_fn`).
166 tables: &'a RefCell<ty::Tables<'tcx>>,
168 // When we process a call like `c()` where `c` is a closure type,
169 // we may not have decided yet whether `c` is a `Fn`, `FnMut`, or
170 // `FnOnce` closure. In that case, we defer full resolution of the
171 // call until upvar inference can kick in and make the
172 // decision. We keep these deferred resolutions grouped by the
173 // def-id of the closure, so that once we decide, we can easily go
174 // back and process them.
175 deferred_call_resolutions: RefCell<DefIdMap<Vec<DeferredCallResolutionHandler<'tcx>>>>,
// Casts whose checking is deferred — presumably until enough type
// information is resolved; see the `cast` module (TODO confirm).
177 deferred_cast_checks: RefCell<Vec<cast::CastCheck<'tcx>>>,
// A deferred closure-call resolution (see the comment on
// `Inherited::deferred_call_resolutions`): `resolve` is invoked once the
// closure's kind (`Fn`/`FnMut`/`FnOnce`) has been decided.
180 trait DeferredCallResolution<'tcx> {
181 fn resolve<'a>(&mut self, fcx: &FnCtxt<'a,'tcx>);
// Boxed trait object form stored in `Inherited::deferred_call_resolutions`.
184 type DeferredCallResolutionHandler<'tcx> = Box<DeferredCallResolution<'tcx>+'tcx>;
186 /// When type-checking an expression, we propagate downward
187 /// whatever type hint we are able in the form of an `Expectation`.
188 #[derive(Copy, Clone, Debug)]
189 pub enum Expectation<'tcx> {
190 /// We know nothing about what type this expression should have.
// NOTE(review): the variant belonging to the doc comment above
// (`NoExpectation`, used e.g. in `check_fn` below) is not visible in
// this excerpt.
193 /// This expression should have the type given (or some subtype)
194 ExpectHasType(Ty<'tcx>),
196 /// This expression will be cast to the `Ty`
197 ExpectCastableToType(Ty<'tcx>),
199 /// This rvalue expression will be wrapped in `&` or `Box` and coerced
200 /// to `&Ty` or `Box<Ty>`, respectively. `Ty` is `[A]` or `Trait`.
201 ExpectRvalueLikeUnsized(Ty<'tcx>),
204 impl<'tcx> Expectation<'tcx> {
205 // Disregard "castable to" expectations because they
206 // can lead us astray. Consider for example `if cond
207 // {22} else {c} as u8` -- if we propagate the
208 // "castable to u8" constraint to 22, it will pick the
209 // type 22u8, which is overly constrained (c might not
210 // be a u8). In effect, the problem is that the
211 // "castable to" expectation is not the tightest thing
212 // we can say, so we want to drop it in this case.
213 // The tightest thing we can say is "must unify with
214 // else branch". Note that in the case of a "has type"
215 // constraint, this limitation does not hold.
217 // If the expected type is just a type variable, then don't use
218 // an expected type. Otherwise, we might write parts of the type
219 // when checking the 'then' block which are incompatible with the
// Weakens `self` into an expectation safe to propagate into the arms
// of a branch (`if`/`match`): a `has type` hint survives only if it is
// not a bare inference variable; an unsized-rvalue hint is kept as-is.
// NOTE(review): excerpt — the arms for the remaining variants and the
// closing braces are not visible here.
221 fn adjust_for_branches<'a>(&self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> {
223 ExpectHasType(ety) => {
// Resolve one level of inference so `is_ty_var` sees through aliases.
224 let ety = fcx.infcx().shallow_resolve(ety);
225 if !ety.is_ty_var() {
231 ExpectRvalueLikeUnsized(ety) => {
232 ExpectRvalueLikeUnsized(ety)
// Tracks whether the code currently being checked is inside an `unsafe`
// context, and how it got there (see `recurse` below).
239 #[derive(Copy, Clone)]
240 pub struct UnsafetyState {
// Node that introduced the current unsafety (a fn or a block id).
241 pub def: ast::NodeId,
242 pub unsafety: hir::Unsafety,
// Depth of compiler-internal push/pop-unsafe blocks; see `recurse`.
243 pub unsafe_push_count: u32,
// NOTE(review): excerpt — a `from_fn` field is referenced by
// `UnsafetyState::function` and `recurse` but its declaration is not
// visible here.
// Initial state for a function body: unsafety comes from the fn itself.
248 pub fn function(unsafety: hir::Unsafety, def: ast::NodeId) -> UnsafetyState {
249 UnsafetyState { def: def, unsafety: unsafety, unsafe_push_count: 0, from_fn: true }
// Derive the state for a nested block from the current state.
// NOTE(review): excerpt — some lines (including the `_` arm's binding of
// `unsafety` and the closing braces) are not visible here.
252 pub fn recurse(&mut self, blk: &hir::Block) -> UnsafetyState {
253 match self.unsafety {
254 // If this unsafe, then if the outer function was already marked as
255 // unsafe we shouldn't attribute the unsafe'ness to the block. This
256 // way the block can be warned about instead of ignoring this
257 // extraneous block (functions are never warned about).
258 hir::Unsafety::Unsafe if self.from_fn => *self,
261 let (unsafety, def, count) = match blk.rules {
// `checked_add`/`checked_sub` deliberately panic on over/underflow:
// an unbalanced push/pop pair is a compiler bug, not a user error.
262 hir::PushUnsafeBlock(..) =>
263 (unsafety, blk.id, self.unsafe_push_count.checked_add(1).unwrap()),
264 hir::PopUnsafeBlock(..) =>
265 (unsafety, blk.id, self.unsafe_push_count.checked_sub(1).unwrap()),
266 hir::UnsafeBlock(..) =>
267 (hir::Unsafety::Unsafe, blk.id, self.unsafe_push_count),
268 hir::DefaultBlock | hir::PushUnstableBlock | hir:: PopUnstableBlock =>
269 (unsafety, self.def, self.unsafe_push_count),
271 UnsafetyState{ def: def,
273 unsafe_push_count: count,
// Per-function type-checking context; one per function/closure body,
// layered over the shared `Inherited` state.
// NOTE(review): excerpt — some fields and the closing brace are not
// visible here.
281 pub struct FnCtxt<'a, 'tcx: 'a> {
282 body_id: ast::NodeId,
284 // This flag is set to true if, during the writeback phase, we encounter
285 // a type error in this function.
286 writeback_errors: Cell<bool>,
288 // Number of errors that had been reported when we started
289 // checking this function. On exit, if we find that *more* errors
290 // have been reported, we will skip regionck and other work that
291 // expects the types within the function to be consistent.
292 err_count_on_creation: usize,
// Declared return type (converging or diverging) of the body being checked.
294 ret_ty: ty::FnOutput<'tcx>,
// Current unsafety state (see `UnsafetyState::recurse`).
296 ps: RefCell<UnsafetyState>,
// Shared inference/locals/tables state (see `Inherited` above).
298 inh: &'a Inherited<'a, 'tcx>,
300 ccx: &'a CrateCtxt<'a, 'tcx>,
303 impl<'a, 'tcx> Inherited<'a, 'tcx> {
// Build a fresh `Inherited` with a new inference context over the given
// side tables and parameter environment.
// NOTE(review): excerpt — the struct-literal opener and some field
// initializers/closing braces are not visible here.
304 fn new(tcx: &'a TyCtxt<'tcx>,
305 tables: &'a RefCell<ty::Tables<'tcx>>,
306 param_env: ty::ParameterEnvironment<'a, 'tcx>)
307 -> Inherited<'a, 'tcx> {
310 infcx: infer::new_infer_ctxt(tcx, tables, Some(param_env), ProjectionMode::AnyFinal),
311 fulfillment_cx: RefCell::new(traits::FulfillmentContext::new()),
312 locals: RefCell::new(NodeMap()),
314 deferred_call_resolutions: RefCell::new(DefIdMap()),
315 deferred_cast_checks: RefCell::new(Vec::new()),
// Normalize associated-type projections in `T`, registering any
// resulting obligations in this context's fulfillment context.
319 fn normalize_associated_types_in<T>(&self,
321 body_id: ast::NodeId,
324 where T : TypeFoldable<'tcx>
326 assoc::normalize_associated_types_in(&self.infcx,
327 &mut self.fulfillment_cx.borrow_mut(),
335 // Used by check_const and check_enum_variants
// Builds a minimal `FnCtxt` (no real enclosing fn) for checking
// constant expressions. NOTE(review): excerpt — the struct-literal
// opener and remaining field initializers are not visible here.
336 pub fn blank_fn_ctxt<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
337 inh: &'a Inherited<'a, 'tcx>,
338 rty: ty::FnOutput<'tcx>,
339 body_id: ast::NodeId)
340 -> FnCtxt<'a, 'tcx> {
343 writeback_errors: Cell::new(false),
344 err_count_on_creation: ccx.tcx.sess.err_count(),
// Constants are never `unsafe` contexts; node id 0 is a placeholder.
346 ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal, 0)),
// Builds an `Inherited` for static/const items using an empty parameter
// environment (statics have no generics in scope).
352 fn static_inherited_fields<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
353 tables: &'a RefCell<ty::Tables<'tcx>>)
354 -> Inherited<'a, 'tcx> {
355 // It's kind of a kludge to manufacture a fake function context
356 // and statement context, but we might as well do write the code only once
357 let param_env = ccx.tcx.empty_parameter_environment();
358 Inherited::new(ccx.tcx, &tables, param_env)
// AST visitors driving the two typeck passes: signature/type checking of
// items (`check_item_type`) and checking of item bodies (`check_item_body`).
361 struct CheckItemTypesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
362 struct CheckItemBodiesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> }
364 impl<'a, 'tcx> Visitor<'tcx> for CheckItemTypesVisitor<'a, 'tcx> {
// Check each item's type-level properties, then recurse into it.
365 fn visit_item(&mut self, i: &'tcx hir::Item) {
366 check_item_type(self.ccx, i);
367 intravisit::walk_item(self, i);
// Fixed-length array types (`[T; N]`) embed an expression: the length
// `N` must type-check as `usize`.
// NOTE(review): excerpt — the `match`/closing braces are not visible.
370 fn visit_ty(&mut self, t: &'tcx hir::Ty) {
372 hir::TyFixedLengthVec(_, ref expr) => {
373 check_const_in_type(self.ccx, &expr, self.ccx.tcx.types.usize);
378 intravisit::walk_ty(self, t);
382 impl<'a, 'tcx> Visitor<'tcx> for CheckItemBodiesVisitor<'a, 'tcx> {
// Check each item's body; unlike the types visitor above, this does not
// call `walk_item` here — presumably nested items are reached another
// way (TODO confirm; closing braces not visible in this excerpt).
383 fn visit_item(&mut self, i: &'tcx hir::Item) {
384 check_item_body(self.ccx, i);
// Entry point: run the well-formedness checks over all items in the
// crate, converting any reported errors into a `CompileResult`.
388 pub fn check_wf_new(ccx: &CrateCtxt) -> CompileResult {
389 ccx.tcx.sess.track_errors(|| {
390 let mut visit = wfcheck::CheckTypeWellFormedVisitor::new(ccx);
391 ccx.tcx.visit_all_items_in_krate(DepNode::WfCheck, &mut visit);
// Entry point: type-check item signatures/types for the whole crate
// (pass 1; bodies are handled by `check_item_bodies` below).
395 pub fn check_item_types(ccx: &CrateCtxt) -> CompileResult {
396 ccx.tcx.sess.track_errors(|| {
397 let mut visit = CheckItemTypesVisitor { ccx: ccx };
398 ccx.tcx.visit_all_items_in_krate(DepNode::TypeckItemType, &mut visit);
// Entry point: type-check all item bodies in the crate (pass 2).
402 pub fn check_item_bodies(ccx: &CrateCtxt) -> CompileResult {
403 ccx.tcx.sess.track_errors(|| {
404 let mut visit = CheckItemBodiesVisitor { ccx: ccx };
405 ccx.tcx.visit_all_items_in_krate(DepNode::TypeckItemBody, &mut visit);
// Entry point: validate every local `Drop` impl via dropck. If the
// crate has no `Drop` lang item at all there is nothing to check.
// NOTE(review): excerpt — the `Err`/closing arms after the
// `check_drop_impl` match are not visible here.
409 pub fn check_drop_impls(ccx: &CrateCtxt) -> CompileResult {
410 ccx.tcx.sess.track_errors(|| {
411 let _task = ccx.tcx.dep_graph.in_task(DepNode::Dropck);
412 let drop_trait = match ccx.tcx.lang_items.drop_trait() {
413 Some(id) => ccx.tcx.lookup_trait_def(id), None => { return }
415 drop_trait.for_each_impl(ccx.tcx, |drop_impl_did| {
416 let _task = ccx.tcx.dep_graph.in_task(DepNode::DropckImpl(drop_impl_did));
// Only impls defined in this crate are checked here.
417 if drop_impl_did.is_local() {
418 match dropck::check_drop_impl(ccx.tcx, drop_impl_did) {
// A dropck failure must already have emitted a diagnostic.
421 assert!(ccx.tcx.sess.has_errors());
// Fully type-checks one free function or method body: builds the
// inference context, liberates/normalizes the signature, checks the body
// via `check_fn`, then runs closure/upvar analysis, obligation
// selection, regionck, and writeback, in that order.
// NOTE(review): excerpt — several parameters (`fn_id`, `fn_span`, the
// fn type) and some statements/braces are not visible here.
429 fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
430 decl: &'tcx hir::FnDecl,
431 body: &'tcx hir::Block,
435 param_env: ty::ParameterEnvironment<'a, 'tcx>)
438 ty::TyFnDef(_, _, ref fn_ty) => {
439 let tables = RefCell::new(ty::Tables::empty());
440 let inh = Inherited::new(ccx.tcx, &tables, param_env);
442 // Compute the fty from point of view of inside fn.
443 let fn_scope = ccx.tcx.region_maps.call_site_extent(fn_id, body.id);
445 fn_ty.sig.subst(ccx.tcx, &inh.infcx.parameter_environment.free_substs);
// Replace late-bound regions with free regions scoped to this body.
447 ccx.tcx.liberate_late_bound_regions(fn_scope, &fn_sig);
449 inh.normalize_associated_types_in(body.span,
453 let fcx = check_fn(ccx, fn_ty.unsafety, fn_id, &fn_sig,
454 decl, fn_id, body, &inh);
456 fcx.select_all_obligations_and_apply_defaults();
457 upvar::closure_analyze_fn(&fcx, fn_id, decl, body);
458 fcx.select_obligations_where_possible();
460 fcx.select_all_obligations_or_error(); // Casts can introduce new obligations.
462 regionck::regionck_fn(&fcx, fn_id, fn_span, decl, body);
463 writeback::resolve_type_vars_in_fn(&fcx, decl, body);
// Reaching here with a non-fn type is a compiler invariant violation.
465 _ => ccx.tcx.sess.impossible_case(body.span,
466 "check_bare_fn: function type expected")
// The "gather" phase (see the module docs): walks a function body and
// creates a type entry in `inh.locals` for every local and binding.
470 struct GatherLocalsVisitor<'a, 'tcx: 'a> {
471 fcx: &'a FnCtxt<'a, 'tcx>
474 impl<'a, 'tcx> GatherLocalsVisitor<'a, 'tcx> {
// Record a type for node `nid`: the user-specified type if given,
// otherwise a fresh inference variable. Returns the recorded type.
// NOTE(review): excerpt — the match dispatching on `ty_opt` and the
// return expressions are not visible here.
475 fn assign(&mut self, _span: Span, nid: ast::NodeId, ty_opt: Option<Ty<'tcx>>) -> Ty<'tcx> {
478 // infer the variable's type
479 let var_ty = self.fcx.infcx().next_ty_var();
480 self.fcx.inh.locals.borrow_mut().insert(nid, var_ty);
484 // take type that the user specified
485 self.fcx.inh.locals.borrow_mut().insert(nid, typ);
// NOTE(review): excerpt — various closing braces/arms throughout this
// impl are not visible here.
492 impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> {
493 // Add explicitly-declared locals.
494 fn visit_local(&mut self, local: &'tcx hir::Local) {
495 let o_ty = match local.ty {
496 Some(ref ty) => Some(self.fcx.to_ty(&ty)),
499 self.assign(local.span, local.id, o_ty);
500 debug!("Local variable {:?} is assigned type {}",
502 self.fcx.infcx().ty_to_string(
503 self.fcx.inh.locals.borrow().get(&local.id).unwrap().clone()));
504 intravisit::walk_local(self, local);
507 // Add pattern bindings.
508 fn visit_pat(&mut self, p: &'tcx hir::Pat) {
// Only identifier patterns that are actual bindings (not unit-struct
// or const patterns) introduce a local.
509 if let PatKind::Ident(_, ref path1, _) = p.node {
510 if pat_util::pat_is_binding(&self.fcx.ccx.tcx.def_map.borrow(), p) {
511 let var_ty = self.assign(p.span, p.id, None);
// A binding must have a sized type.
513 self.fcx.require_type_is_sized(var_ty, p.span,
514 traits::VariableType(p.id));
516 debug!("Pattern binding {} is assigned to {} with type {:?}",
518 self.fcx.infcx().ty_to_string(
519 self.fcx.inh.locals.borrow().get(&p.id).unwrap().clone()),
523 intravisit::walk_pat(self, p);
526 fn visit_block(&mut self, b: &'tcx hir::Block) {
527 // non-obvious: the `blk` variable maps to region lb, so
528 // we have to keep this up-to-date. This
529 // is... unfortunate. It'd be nice to not need this.
530 intravisit::walk_block(self, b);
533 // Since an expr occurs as part of the type fixed size arrays we
534 // need to record the type for that node
535 fn visit_ty(&mut self, t: &'tcx hir::Ty) {
537 hir::TyFixedLengthVec(ref ty, ref count_expr) => {
// Array lengths are expressions of type `usize`.
539 check_expr_with_hint(self.fcx, &count_expr, self.fcx.tcx().types.usize);
541 hir::TyBareFn(ref function_declaration) => {
// Walk fn-pointer types without treating their patterns as bindings.
542 intravisit::walk_fn_decl_nopat(self, &function_declaration.decl);
543 walk_list!(self, visit_lifetime_def, &function_declaration.lifetimes);
545 _ => intravisit::walk_ty(self, t)
549 // Don't descend into the bodies of nested closures
550 fn visit_fn(&mut self, _: intravisit::FnKind<'tcx>, _: &'tcx hir::FnDecl,
551 _: &'tcx hir::Block, _: Span, _: ast::NodeId) { }
554 /// Helper used by check_bare_fn and check_expr_fn. Does the grungy work of checking a function
555 /// body and returns the function context used for that purpose, since in the case of a fn item
556 /// there is still a bit more to do.
559 /// * inherited: other fields inherited from the enclosing fn (if any)
// NOTE(review): excerpt — the `fn_id` parameter, the `FnCtxt` literal
// opener, the `pat_ctxt` construction, and various closing braces are
// not visible here.
560 fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>,
561 unsafety: hir::Unsafety,
562 unsafety_id: ast::NodeId,
563 fn_sig: &ty::FnSig<'tcx>,
564 decl: &'tcx hir::FnDecl,
566 body: &'tcx hir::Block,
567 inherited: &'a Inherited<'a, 'tcx>)
// Snapshot the error count so later phases can tell if this body
// introduced new errors (see `FnCtxt::err_count_on_creation`).
571 let err_count_on_creation = tcx.sess.err_count();
573 let arg_tys = &fn_sig.inputs;
574 let ret_ty = fn_sig.output;
576 debug!("check_fn(arg_tys={:?}, ret_ty={:?}, fn_id={})",
581 // Create the function context. This is either derived from scratch or,
582 // in the case of function expressions, based on the outer context.
585 writeback_errors: Cell::new(false),
586 err_count_on_creation: err_count_on_creation,
588 ps: RefCell::new(UnsafetyState::function(unsafety, unsafety_id)),
// A converging function must return a sized value.
593 if let ty::FnConverging(ret_ty) = ret_ty {
594 fcx.require_type_is_sized(ret_ty, decl.output.span(), traits::ReturnType);
597 debug!("fn-sig-map: fn_id={} fn_sig={:?}", fn_id, fn_sig);
// Record the liberated signature for later passes (e.g. regionck).
599 inherited.tables.borrow_mut().liberated_fn_sigs.insert(fn_id, fn_sig.clone());
602 let mut visit = GatherLocalsVisitor { fcx: &fcx, };
604 // Add formal parameters.
605 for (arg_ty, input) in arg_tys.iter().zip(&decl.inputs) {
606 // The type of the argument must be well-formed.
608 // NB -- this is now checked in wfcheck, but that
609 // currently only results in warnings, so we issue an
610 // old-style WF obligation here so that we still get the
611 // errors that we used to get.
612 fcx.register_old_wf_obligation(arg_ty, input.ty.span, traits::MiscObligation);
614 // Create type variables for each argument.
615 pat_util::pat_bindings(
618 |_bm, pat_id, sp, _path| {
619 let var_ty = visit.assign(sp, pat_id, None);
620 fcx.require_type_is_sized(var_ty, sp,
621 traits::VariableType(pat_id));
624 // Check the pattern.
627 map: pat_id_map(&tcx.def_map, &input.pat),
629 _match::check_pat(&pcx, &input.pat, *arg_ty);
// Gather all remaining locals/bindings in the body.
632 visit.visit_block(body);
// Check the body against the declared return type.
635 check_block_with_expected(&fcx, body, match ret_ty {
636 ty::FnConverging(result_type) => ExpectHasType(result_type),
637 ty::FnDiverging => NoExpectation
// Record the final argument types on the argument nodes.
640 for (input, arg) in decl.inputs.iter().zip(arg_tys) {
641 fcx.write_ty(input.id, arg);
// Checks a struct definition: representability (no infinitely-sized
// recursion) and, for `#[repr(simd)]` structs, SIMD validity.
647 pub fn check_struct(ccx: &CrateCtxt, id: ast::NodeId, span: Span) {
650 check_representable(tcx, span, id, "struct");
652 if tcx.lookup_simd(ccx.tcx.map.local_def_id(id)) {
653 check_simd(tcx, span, id);
// Pass 1 for a single item: checks its type-level properties,
// dispatching on the item kind. Bodies are handled by `check_item_body`.
// NOTE(review): excerpt — the outer `match it.node`, several arguments,
// and various closing braces are not visible here.
657 pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
658 debug!("check_item_type(it.id={}, it.name={})",
660 ccx.tcx.item_path_str(ccx.tcx.map.local_def_id(it.id)));
661 let _indenter = indenter();
663 // Consts can play a role in type-checking, so they are included here.
664 hir::ItemStatic(_, _, ref e) |
665 hir::ItemConst(_, ref e) => check_const(ccx, it.span, &e, it.id),
666 hir::ItemEnum(ref enum_definition, _) => {
667 check_enum_variants(ccx,
669 &enum_definition.variants,
672 hir::ItemFn(..) => {} // entirely within check_item_body
673 hir::ItemImpl(_, _, _, _, _, ref impl_items) => {
674 debug!("ItemImpl {} with id {}", it.name, it.id);
675 let impl_def_id = ccx.tcx.map.local_def_id(it.id);
// Trait impls get their items checked against the trait definition;
// inherent impls (None) have nothing extra to check here.
676 match ccx.tcx.impl_trait_ref(impl_def_id) {
677 Some(impl_trait_ref) => {
678 check_impl_items_against_trait(ccx,
687 hir::ItemTrait(_, ref generics, _, _) => {
688 check_trait_on_unimplemented(ccx, generics, it);
690 hir::ItemStruct(..) => {
691 check_struct(ccx, it.id, it.span);
693 hir::ItemTy(_, ref generics) => {
// Type aliases must use all their declared type parameters.
694 let pty_ty = ccx.tcx.node_id_to_type(it.id);
695 check_bounds_are_used(ccx, &generics.ty_params, pty_ty);
697 hir::ItemForeignMod(ref m) => {
698 if m.abi == Abi::RustIntrinsic {
699 for item in &m.items {
700 intrinsic::check_intrinsic_type(ccx, item);
702 } else if m.abi == Abi::PlatformIntrinsic {
703 for item in &m.items {
704 intrinsic::check_platform_intrinsic_type(ccx, item);
// Non-intrinsic foreign items: no generics allowed, and variadic
// fns require the C ABI.
707 for item in &m.items {
708 let pty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(item.id));
709 if !pty.generics.types.is_empty() {
710 let mut err = struct_span_err!(ccx.tcx.sess, item.span, E0044,
711 "foreign items may not have type parameters");
712 span_help!(&mut err, item.span,
713 "consider using specialization instead of \
718 if let hir::ForeignItemFn(ref fn_decl, _) = item.node {
719 require_c_abi_if_variadic(ccx.tcx, fn_decl, m.abi, item.span);
724 _ => {/* nothing to do */ }
// Pass 2 for a single item: type-checks the executable bodies it
// contains (fn bodies, const initializers, impl/trait item bodies).
// NOTE(review): excerpt — the outer `match it.node` opener and various
// closing braces are not visible here.
728 pub fn check_item_body<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
729 debug!("check_item_body(it.id={}, it.name={})",
731 ccx.tcx.item_path_str(ccx.tcx.map.local_def_id(it.id)));
732 let _indenter = indenter();
734 hir::ItemFn(ref decl, _, _, _, _, ref body) => {
735 let fn_pty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(it.id));
736 let param_env = ParameterEnvironment::for_item(ccx.tcx, it.id);
737 check_bare_fn(ccx, &decl, &body, it.id, it.span, fn_pty.ty, param_env);
739 hir::ItemImpl(_, _, _, _, _, ref impl_items) => {
740 debug!("ItemImpl {} with id {}", it.name, it.id);
742 let impl_pty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(it.id));
744 for impl_item in impl_items {
745 match impl_item.node {
746 hir::ImplItemKind::Const(_, ref expr) => {
747 check_const(ccx, impl_item.span, &expr, impl_item.id)
749 hir::ImplItemKind::Method(ref sig, ref body) => {
750 check_method_body(ccx, &impl_pty.generics, sig, body,
751 impl_item.id, impl_item.span);
753 hir::ImplItemKind::Type(_) => {
754 // Nothing to do here.
759 hir::ItemTrait(_, _, _, ref trait_items) => {
760 let trait_def = ccx.tcx.lookup_trait_def(ccx.tcx.map.local_def_id(it.id));
761 for trait_item in trait_items {
762 match trait_item.node {
763 hir::ConstTraitItem(_, Some(ref expr)) => {
764 check_const(ccx, trait_item.span, &expr, trait_item.id)
// Provided (defaulted) trait methods get their bodies checked,
// and trait fns may never be `const` (E0379).
766 hir::MethodTraitItem(ref sig, Some(ref body)) => {
767 check_trait_fn_not_const(ccx, trait_item.span, sig.constness);
769 check_method_body(ccx, &trait_def.generics, sig, body,
770 trait_item.id, trait_item.span);
772 hir::MethodTraitItem(ref sig, None) => {
773 check_trait_fn_not_const(ccx, trait_item.span, sig.constness);
775 hir::ConstTraitItem(_, None) |
776 hir::TypeTraitItem(..) => {
782 _ => {/* nothing to do */ }
// Rejects `const fn` in traits (E0379); plain fns pass through.
// NOTE(review): excerpt — the `span` parameter and match opener/braces
// are not visible here.
786 fn check_trait_fn_not_const<'a,'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
788 constness: hir::Constness)
791 hir::Constness::NotConst => {
794 hir::Constness::Const => {
795 span_err!(ccx.tcx.sess, span, E0379, "trait fns cannot be declared const");
// Validates a trait's `#[rustc_on_unimplemented = "..."]` attribute:
// the format string may reference `{Self}` or a declared type parameter
// by name (E0230 otherwise), never positional params (E0231), and the
// attribute must carry a string value at all (E0232).
// NOTE(review): excerpt — the `item` parameter, inner match opener, and
// several arms/braces are not visible here.
800 fn check_trait_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
801 generics: &hir::Generics,
803 if let Some(ref attr) = item.attrs.iter().find(|a| {
804 a.check_name("rustc_on_unimplemented")
806 if let Some(ref istring) = attr.value_str() {
807 let parser = Parser::new(&istring);
808 let types = &generics.ty_params;
809 for token in parser {
811 Piece::String(_) => (), // Normal string, no need to check it
812 Piece::NextArgument(a) => match a.position {
813 // `{Self}` is allowed
814 Position::ArgumentNamed(s) if s == "Self" => (),
815 // So is `{A}` if A is a type parameter
816 Position::ArgumentNamed(s) => match types.iter().find(|t| {
821 span_err!(ccx.tcx.sess, attr.span, E0230,
822 "there is no type parameter \
827 // `{:1}` and `{}` are not to be used
828 Position::ArgumentIs(_) | Position::ArgumentNext => {
829 span_err!(ccx.tcx.sess, attr.span, E0231,
830 "only named substitution \
831 parameters are allowed");
837 span_err!(ccx.tcx.sess, attr.span, E0232,
838 "this attribute must have a value, \
839 eg `#[rustc_on_unimplemented = \"foo\"]`")
844 /// Type checks a method body.
848 /// * `item_generics`: generics defined on the impl/trait that contains
850 /// * `self_bound`: bound for the `Self` type parameter, if any
851 /// * `method`: the method definition
// Thin wrapper: builds the method's parameter environment and fn type,
// then delegates to `check_bare_fn`.
852 fn check_method_body<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
853 item_generics: &ty::Generics<'tcx>,
854 sig: &'tcx hir::MethodSig,
855 body: &'tcx hir::Block,
856 id: ast::NodeId, span: Span) {
857 debug!("check_method_body(item_generics={:?}, id={})",
859 let param_env = ParameterEnvironment::for_item(ccx.tcx, id);
861 let fty = ccx.tcx.node_id_to_type(id);
862 debug!("check_method_body: fty={:?}", fty);
864 check_bare_fn(ccx, &sig.decl, body, id, span, fty, param_env);
// Emits E0520 for an impl item that tries to specialize a parent impl
// item not marked `default`, pointing at the parent impl when its span
// is known (local) and naming its crate otherwise.
// NOTE(review): excerpt — the `parent_impl` parameter, the match arms'
// `Ok`/`Err` patterns, and the final `err.emit()` are not visible here.
867 fn report_forbidden_specialization(tcx: &TyCtxt,
868 impl_item: &hir::ImplItem,
871 let mut err = struct_span_err!(
872 tcx.sess, impl_item.span, E0520,
873 "item `{}` is provided by an `impl` that specializes \
874 another, but the item in the parent `impl` is not \
875 marked `default` and so it cannot be specialized.",
878 match tcx.span_of_impl(parent_impl) {
880 err.span_note(span, "parent implementation is here:");
883 err.note(&format!("parent implementation is in crate `{}`", cname));
// For an item in `impl_id`, walks the specialization ancestor chain of
// the trait and finds the nearest *parent* definition of the same item
// (`skip(1)` skips this impl's own definition). If that parent exists
// and is final (not `default`), specializing it is an error.
890 fn check_specialization_validity<'tcx>(tcx: &TyCtxt<'tcx>, trait_def: &ty::TraitDef<'tcx>,
891 impl_id: DefId, impl_item: &hir::ImplItem)
893 let ancestors = trait_def.ancestors(impl_id);
895 let parent = match impl_item.node {
896 hir::ImplItemKind::Const(..) => {
897 ancestors.const_defs(tcx, impl_item.name).skip(1).next()
898 .map(|node_item| node_item.map(|parent| parent.defaultness))
900 hir::ImplItemKind::Method(..) => {
901 ancestors.fn_defs(tcx, impl_item.name).skip(1).next()
902 .map(|node_item| node_item.map(|parent| parent.defaultness))
905 hir::ImplItemKind::Type(_) => {
906 ancestors.type_defs(tcx, impl_item.name).skip(1).next()
907 .map(|node_item| node_item.map(|parent| parent.defaultness))
911 if let Some(parent) = parent {
912 if parent.item.is_final() {
913 report_forbidden_specialization(tcx, impl_item, parent.node.def_id());
// Checks every item of a trait impl against the trait definition:
// (1) each impl item must match a trait item of the same name AND kind
//     (E0323/E0324/E0325 on kind mismatch) and satisfy the
//     specialization rules; (2) every required trait item must be
//     implemented somewhere in the specialization chain (E0046); and
// (3) overriding an associated type invalidates defaults that depended
//     on it (E0399).
// NOTE(review): excerpt — the `impl_id`/`impl_span` parameters, several
// call arguments, `else` arms, and closing braces are not visible here.
919 fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
922 impl_trait_ref: &ty::TraitRef<'tcx>,
923 impl_items: &[hir::ImplItem]) {
924 // If the trait reference itself is erroneous (so the compilation is going
925 // to fail), skip checking the items here -- the `impl_item` table in `tcx`
926 // isn't populated for such impls.
927 if impl_trait_ref.references_error() { return; }
929 // Locate trait definition and items
931 let trait_def = tcx.lookup_trait_def(impl_trait_ref.def_id);
932 let trait_items = tcx.trait_items(impl_trait_ref.def_id);
933 let mut overridden_associated_type = None;
935 // Check existing impl methods to see if they are both present in trait
936 // and compatible with trait signature
937 for impl_item in impl_items {
938 let ty_impl_item = ccx.tcx.impl_or_trait_item(ccx.tcx.map.local_def_id(impl_item.id));
939 let ty_trait_item = trait_items.iter()
940 .find(|ac| ac.name() == ty_impl_item.name());
942 // Check that impl definition matches trait definition
943 if let Some(ty_trait_item) = ty_trait_item {
944 match impl_item.node {
945 hir::ImplItemKind::Const(..) => {
946 let impl_const = match ty_impl_item {
947 ty::ConstTraitItem(ref cti) => cti,
// Kind mismatch between HIR and ty tables is a compiler bug.
948 _ => tcx.sess.span_bug(impl_item.span, "non-const impl-item for const")
951 // Find associated const definition.
952 if let &ty::ConstTraitItem(ref trait_const) = ty_trait_item {
953 compare_const_impl(ccx.tcx,
959 span_err!(tcx.sess, impl_item.span, E0323,
960 "item `{}` is an associated const, \
961 which doesn't match its trait `{:?}`",
966 hir::ImplItemKind::Method(ref sig, ref body) => {
967 check_trait_fn_not_const(ccx, impl_item.span, sig.constness);
969 let impl_method = match ty_impl_item {
970 ty::MethodTraitItem(ref mti) => mti,
971 _ => tcx.sess.span_bug(impl_item.span, "non-method impl-item for method")
974 if let &ty::MethodTraitItem(ref trait_method) = ty_trait_item {
975 compare_impl_method(ccx.tcx,
982 span_err!(tcx.sess, impl_item.span, E0324,
983 "item `{}` is an associated method, \
984 which doesn't match its trait `{:?}`",
989 hir::ImplItemKind::Type(_) => {
990 let impl_type = match ty_impl_item {
991 ty::TypeTraitItem(ref tti) => tti,
992 _ => tcx.sess.span_bug(impl_item.span, "non-type impl-item for type")
995 if let &ty::TypeTraitItem(ref at) = ty_trait_item {
// The trait had a default for this associated type and the impl
// overrides it — remember that for the E0399 check below.
996 if let Some(_) = at.ty {
997 overridden_associated_type = Some(impl_item);
1000 span_err!(tcx.sess, impl_item.span, E0325,
1001 "item `{}` is an associated type, \
1002 which doesn't match its trait `{:?}`",
1010 check_specialization_validity(tcx, trait_def, impl_id, impl_item);
1013 // Check for missing items from trait
1014 let provided_methods = tcx.provided_trait_methods(impl_trait_ref.def_id);
1015 let mut missing_items = Vec::new();
1016 let mut invalidated_items = Vec::new();
1017 let associated_type_overridden = overridden_associated_type.is_some();
1018 for trait_item in trait_items.iter() {
1023 ty::ConstTraitItem(ref associated_const) => {
1024 is_provided = associated_const.has_value;
1025 is_implemented = impl_items.iter().any(|ii| {
1027 hir::ImplItemKind::Const(..) => {
1028 ii.name == associated_const.name
1034 ty::MethodTraitItem(ref trait_method) => {
1035 is_provided = provided_methods.iter().any(|m| m.name == trait_method.name);
// "Implemented" means some non-trait node in the specialization
// chain defines it (a default body in the trait doesn't count).
1036 is_implemented = trait_def.ancestors(impl_id)
1037 .fn_defs(tcx, trait_method.name)
1039 .map(|node_item| !node_item.node.is_from_trait())
1042 ty::TypeTraitItem(ref trait_assoc_ty) => {
1043 is_provided = trait_assoc_ty.ty.is_some();
1044 is_implemented = trait_def.ancestors(impl_id)
1045 .type_defs(tcx, trait_assoc_ty.name)
1047 .map(|node_item| !node_item.node.is_from_trait())
1052 if !is_implemented {
1054 missing_items.push(trait_item.name());
1055 } else if associated_type_overridden {
1056 invalidated_items.push(trait_item.name());
1061 if !missing_items.is_empty() {
1062 span_err!(tcx.sess, impl_span, E0046,
1063 "not all trait items implemented, missing: `{}`",
1064 missing_items.iter()
1065 .map(|name| name.to_string())
1066 .collect::<Vec<_>>().join("`, `"))
1069 if !invalidated_items.is_empty() {
1070 let invalidator = overridden_associated_type.unwrap();
1071 span_err!(tcx.sess, invalidator.span, E0399,
1072 "the following trait items need to be reimplemented \
1073 as `{}` was overridden: `{}`",
1075 invalidated_items.iter()
1076 .map(|name| name.to_string())
1077 .collect::<Vec<_>>().join("`, `"))
// Reports a cast whose target type is unsized, with a tailored
// suggestion: `&T`/`&mut T` for reference targets (with a machine-
// applicable suggestion when the source snippet is available),
// `Box<T>` otherwise. Marks the cast expression's type as an error so
// downstream checking doesn't cascade.
// NOTE(review): excerpt — several parameters (`span`, `t_span`,
// `e_span`, `t_cast`, `t_expr`, `id`), the match opener, and `Ok`/`Err`
// arm patterns are not visible here.
1081 fn report_cast_to_unsized_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
// Don't pile a second diagnostic on types that are already errors.
1088 if t_cast.references_error() || t_expr.references_error() {
1091 let tstr = fcx.infcx().ty_to_string(t_cast);
1092 let mut err = fcx.type_error_struct(span, |actual| {
1093 format!("cast to unsized type: `{}` as `{}`", actual, tstr)
1096 ty::TyRef(_, ty::TypeAndMut { mutbl: mt, .. }) => {
1097 let mtstr = match mt {
1098 hir::MutMutable => "mut ",
1099 hir::MutImmutable => ""
1101 if t_cast.is_trait() {
1102 match fcx.tcx().sess.codemap().span_to_snippet(t_span) {
1104 err.span_suggestion(t_span,
1105 "try casting to a reference instead:",
1106 format!("&{}{}", mtstr, s));
1109 span_help!(err, t_span,
1110 "did you mean `&{}{}`?", mtstr, tstr),
1113 span_help!(err, span,
1114 "consider using an implicit coercion to `&{}{}` instead",
1119 match fcx.tcx().sess.codemap().span_to_snippet(t_span) {
1121 err.span_suggestion(t_span,
1122 "try casting to a `Box` instead:",
1123 format!("Box<{}>", s));
1126 span_help!(err, t_span, "did you mean `Box<{}>`?", tstr),
1130 span_help!(err, e_span,
1131 "consider using a box or reference as appropriate");
1135 fcx.write_error(id);
// `AstConv` implementation for `FnCtxt`: lets the AST-to-type conversion code
// (astconv) run inside a function body, answering its queries from the tcx and
// from the inference context, and creating fresh inference variables where the
// generic conversion code needs a type/region it cannot determine itself.
// NOTE(review): elided listing — interior lines and closing braces are missing
// from this view; the visible code is kept byte-identical.
1139 impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> {
1140     fn tcx(&self) -> &TyCtxt<'tcx> { self.ccx.tcx }
// Item type lookup is infallible here: within a body, collect has already run.
1142     fn get_item_type_scheme(&self, _: Span, id: DefId)
1143                             -> Result<ty::TypeScheme<'tcx>, ErrorReported>
1145         Ok(self.tcx().lookup_item_type(id))
1148     fn get_trait_def(&self, _: Span, id: DefId)
1149                      -> Result<&'tcx ty::TraitDef<'tcx>, ErrorReported>
1151         Ok(self.tcx().lookup_trait_def(id))
1154     fn ensure_super_predicates(&self, _: Span, _: DefId) -> Result<(), ErrorReported> {
1155         // all super predicates are ensured during collect pass
// Within a body we type-check against the free (already-substituted)
// parameter environment, so expose its substitutions.
1159     fn get_free_substs(&self) -> Option<&Substs<'tcx>> {
1160         Some(&self.inh.infcx.parameter_environment.free_substs)
// Returns the trait bounds in scope for the type parameter declared at
// `node_id`, filtered out of the caller-side predicate list.
1163     fn get_type_parameter_bounds(&self,
1165                                  node_id: ast::NodeId)
1166                                  -> Result<Vec<ty::PolyTraitRef<'tcx>>, ErrorReported>
1168         let def = self.tcx().type_parameter_def(node_id);
1169         let r = self.inh.infcx.parameter_environment
1172                                .filter_map(|predicate| {
1174                                    ty::Predicate::Trait(ref data) => {
// Keep only predicates whose self type is exactly this parameter.
1175                                        if data.0.self_ty().is_param(def.space, def.index) {
1176                                            Some(data.to_poly_trait_ref())
1190     fn trait_defines_associated_type_named(&self,
1191                                            trait_def_id: DefId,
1192                                            assoc_name: ast::Name)
1195         let trait_def = self.ccx.tcx.lookup_trait_def(trait_def_id);
1196         trait_def.associated_type_names.contains(&assoc_name)
// When astconv omits a type, supply a fresh inference variable, seeded with
// the parameter's declared default (substituted through `substs`) if any.
1200                 ty_param_def: Option<ty::TypeParameterDef<'tcx>>,
1201                 substs: Option<&mut subst::Substs<'tcx>>,
1202                 space: Option<subst::ParamSpace>,
1203                 span: Span) -> Ty<'tcx> {
1204         // Grab the default and substitute into it
1205         let default = ty_param_def.and_then(|def| {
1206             def.default.map(|ty| type_variable::Default {
1207                 ty: ty.subst_spanned(self.tcx(), substs.as_ref().unwrap(), Some(span)),
1209                 def_id: def.default_def_id
1213         let ty_var = self.infcx().next_ty_var_with_default(default);
1215         // Finally we add the type variable to the substs
1218             Some(substs) => { substs.types.push(space.unwrap(), ty_var); ty_var }
// Projections under a binder: replace late-bound regions with fresh variables
// before normalizing the associated type.
1222     fn projected_ty_from_poly_trait_ref(&self,
1224                                         poly_trait_ref: ty::PolyTraitRef<'tcx>,
1225                                         item_name: ast::Name)
1228         let (trait_ref, _) =
1229             self.infcx().replace_late_bound_regions_with_fresh_var(
1231                 infer::LateBoundRegionConversionTime::AssocTypeProjection(item_name),
1234         self.normalize_associated_type(span, trait_ref, item_name)
1237     fn projected_ty(&self,
1239                     trait_ref: ty::TraitRef<'tcx>,
1240                     item_name: ast::Name)
1243         self.normalize_associated_type(span, trait_ref, item_name)
// Inherent methods on `FnCtxt`, the per-function type-checking context:
// accessors, the side tables (`node_types`, `item_substs`, `adjustments`,
// `method_map`) written during checking, obligation registration, and the
// numeric/default type-variable fallback machinery.
// NOTE(review): elided listing — many interior lines (parameters, match arms,
// closing braces) are missing from this view; visible code kept byte-identical.
1247 impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
1248     fn tcx(&self) -> &TyCtxt<'tcx> { self.ccx.tcx }
1250     pub fn infcx(&self) -> &infer::InferCtxt<'a,'tcx> {
1254     pub fn param_env(&self) -> &ty::ParameterEnvironment<'a,'tcx> {
1255         &self.inh.infcx.parameter_environment
1258     pub fn sess(&self) -> &Session {
// Number of errors reported since this FnCtxt was created — used to decide
// whether a missing node type is plausibly the fallout of an earlier error.
1262     pub fn err_count_since_creation(&self) -> usize {
1263         self.ccx.tcx.sess.err_count() - self.err_count_on_creation
1266     /// Resolves type variables in `ty` if possible. Unlike the infcx
1267     /// version, this version will also select obligations if it seems
1268     /// useful, in an effort to get more type information.
1269     fn resolve_type_vars_if_possible(&self, mut ty: Ty<'tcx>) -> Ty<'tcx> {
1270         debug!("resolve_type_vars_if_possible(ty={:?})", ty);
1272         // No TyInfer()? Nothing needs doing.
1273         if !ty.has_infer_types() {
1274             debug!("resolve_type_vars_if_possible: ty={:?}", ty);
1278         // If `ty` is a type variable, see whether we already know what it is.
1279         ty = self.infcx().resolve_type_vars_if_possible(&ty);
1280         if !ty.has_infer_types() {
1281             debug!("resolve_type_vars_if_possible: ty={:?}", ty);
1285         // If not, try resolving pending obligations as much as
1286         // possible. This can help substantially when there are
1287         // indirect dependencies that don't seem worth tracking
1289         self.select_obligations_where_possible();
1290         ty = self.infcx().resolve_type_vars_if_possible(&ty);
1292         debug!("resolve_type_vars_if_possible: ty={:?}", ty);
// Queue a closure-call resolution to be retried once the closure's kind is
// known; keyed by the closure's DefId.
1296     fn record_deferred_call_resolution(&self,
1297                                        closure_def_id: DefId,
1298                                        r: DeferredCallResolutionHandler<'tcx>) {
1299         let mut deferred_call_resolutions = self.inh.deferred_call_resolutions.borrow_mut();
1300         deferred_call_resolutions.entry(closure_def_id).or_insert(vec![]).push(r);
1303     fn remove_deferred_call_resolutions(&self,
1304                                         closure_def_id: DefId)
1305                                         -> Vec<DeferredCallResolutionHandler<'tcx>>
1307         let mut deferred_call_resolutions = self.inh.deferred_call_resolutions.borrow_mut();
1308         deferred_call_resolutions.remove(&closure_def_id).unwrap_or(Vec::new())
// Debug tag identifying this FnCtxt instance (its address) in log output.
1311     pub fn tag(&self) -> String {
1312         let self_ptr: *const FnCtxt = self;
1313         format!("{:?}", self_ptr)
// Type of a local binding; missing entries report E0513 and yield `err`.
1316     pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> Ty<'tcx> {
1317         match self.inh.locals.borrow().get(&nid) {
1320                 span_err!(self.tcx().sess, span, E0513,
1321                           "no type for local variable {}",
1323                 self.tcx().types.err
// Record the inferred type of an AST node in the node_types table.
1329     pub fn write_ty(&self, node_id: ast::NodeId, ty: Ty<'tcx>) {
1330         debug!("write_ty({}, {:?}) in fcx {}",
1331                node_id, ty, self.tag());
1332         self.inh.tables.borrow_mut().node_types.insert(node_id, ty);
// Record item substitutions; no-op substs are skipped to keep the table small.
1335     pub fn write_substs(&self, node_id: ast::NodeId, substs: ty::ItemSubsts<'tcx>) {
1336         if !substs.substs.is_noop() {
1337             debug!("write_substs({}, {:?}) in fcx {}",
1342             self.inh.tables.borrow_mut().item_substs.insert(node_id, substs);
// Convenience: record a pure autoderef adjustment (derefs only, no autoref).
1346     pub fn write_autoderef_adjustment(&self,
1347                                       node_id: ast::NodeId,
1349         self.write_adjustment(
1351             adjustment::AdjustDerefRef(adjustment::AutoDerefRef {
1359     pub fn write_adjustment(&self,
1360                             node_id: ast::NodeId,
1361                             adj: adjustment::AutoAdjustment<'tcx>) {
1362         debug!("write_adjustment(node_id={}, adj={:?})", node_id, adj);
// Identity adjustments carry no information; don't record them.
1364         if adj.is_identity() {
1368         self.inh.tables.borrow_mut().adjustments.insert(node_id, adj);
1371     /// Basically whenever we are converting from a type scheme into
1372     /// the fn body space, we always want to normalize associated
1373     /// types as well. This function combines the two.
1374     fn instantiate_type_scheme<T>(&self,
1376                                   substs: &Substs<'tcx>,
1379         where T : TypeFoldable<'tcx>
1381         let value = value.subst(self.tcx(), substs);
1382         let result = self.normalize_associated_types_in(span, &value);
1383         debug!("instantiate_type_scheme(value={:?}, substs={:?}) = {:?}",
1390     /// As `instantiate_type_scheme`, but for the bounds found in a
1391     /// generic type scheme.
1392     fn instantiate_bounds(&self,
1394                           substs: &Substs<'tcx>,
1395                           bounds: &ty::GenericPredicates<'tcx>)
1396                           -> ty::InstantiatedPredicates<'tcx>
1398         ty::InstantiatedPredicates {
1399             predicates: self.instantiate_type_scheme(span, substs, &bounds.predicates)
// Normalize associated-type projections in `value`, in this body's context.
1404     fn normalize_associated_types_in<T>(&self, span: Span, value: &T) -> T
1405         where T : TypeFoldable<'tcx>
1407         self.inh.normalize_associated_types_in(span, self.body_id, value)
// Normalize one projection `<trait_ref>::item_name` to a concrete type.
1410     fn normalize_associated_type(&self,
1412                                  trait_ref: ty::TraitRef<'tcx>,
1413                                  item_name: ast::Name)
1416         let cause = traits::ObligationCause::new(span,
1418                                                  traits::ObligationCauseCode::MiscObligation);
1422             .normalize_projection_type(self.infcx(),
1424                                            trait_ref: trait_ref,
1425                                            item_name: item_name,
1430     /// Instantiates the type in `did` with the generics in `path` and returns
1431     /// it (registering the necessary trait obligations along the way).
1433     /// Note that this function is only intended to be used with type-paths,
1434     /// not with value-paths.
1435     pub fn instantiate_type(&self,
1440         debug!("instantiate_type(did={:?}, path={:?})", did, path);
1442             self.tcx().lookup_item_type(did);
1443         let type_predicates =
1444             self.tcx().lookup_predicates(did);
// Convert the path's generic arguments into substitutions for the item.
1445         let substs = astconv::ast_path_substs_for_ty(self, self,
1447                                                      PathParamMode::Optional,
1448                                                      &type_scheme.generics,
1449                                                      path.segments.last().unwrap());
1450         debug!("instantiate_type: ty={:?} substs={:?}", &type_scheme.ty, &substs);
// Register the item's where-clauses, instantiated with those substitutions.
1452             self.instantiate_bounds(path.span, &substs, &type_predicates);
1453         self.add_obligations_for_parameters(
1454             traits::ObligationCause::new(
1457                 traits::ItemObligation(did)),
1460         self.instantiate_type_scheme(path.span, &substs, &type_scheme.ty)
1463     /// Return the dict-like variant corresponding to a given `Def`.
1464     pub fn def_struct_variant(&self,
1467                               -> Option<(ty::AdtDef<'tcx>, ty::VariantDef<'tcx>)>
1469         let (adt, variant) = match def {
1470             Def::Variant(enum_id, variant_id) => {
1471                 let adt = self.tcx().lookup_adt_def(enum_id);
1472                 (adt, adt.variant_with_id(variant_id))
// Type aliases are accepted only when they resolve to a struct type.
1474             Def::Struct(did) | Def::TyAlias(did) => {
1475                 let typ = self.tcx().lookup_item_type(did);
1476                 if let ty::TyStruct(adt, _) = typ.ty.sty {
1477                     (adt, adt.struct_variant())
// Only struct-like (and unit) variants can be used with field syntax.
1485         let var_kind = variant.kind();
1486         if var_kind == ty::VariantKind::Struct {
1487             Some((adt, variant))
1488         } else if var_kind == ty::VariantKind::Unit {
1489              Some((adt, variant))
1495     pub fn write_nil(&self, node_id: ast::NodeId) {
1496         self.write_ty(node_id, self.tcx().mk_nil());
1498     pub fn write_error(&self, node_id: ast::NodeId) {
1499         self.write_ty(node_id, self.tcx().types.err);
// Register that `ty` must satisfy the given builtin bound (e.g. Sized).
1502     pub fn require_type_meets(&self,
1505                               code: traits::ObligationCauseCode<'tcx>,
1506                               bound: ty::BuiltinBound)
1508         self.register_builtin_bound(
1511             traits::ObligationCause::new(span, self.body_id, code));
1514     pub fn require_type_is_sized(&self,
1517                                  code: traits::ObligationCauseCode<'tcx>)
1519         self.require_type_meets(ty, span, code, ty::BoundSized);
1522     pub fn require_expr_have_sized_type(&self,
1524                                         code: traits::ObligationCauseCode<'tcx>)
1526         self.require_type_is_sized(self.expr_ty(expr), expr.span, code);
1529     pub fn type_is_known_to_be_sized(&self,
1534         traits::type_known_to_meet_builtin_bound(self.infcx(),
// Queue a builtin-bound obligation with the fulfillment context.
1540     pub fn register_builtin_bound(&self,
1542                                   builtin_bound: ty::BuiltinBound,
1543                                   cause: traits::ObligationCause<'tcx>)
1545         self.inh.fulfillment_cx.borrow_mut()
1546             .register_builtin_bound(self.infcx(), ty, builtin_bound, cause);
1549     pub fn register_predicate(&self,
1550                               obligation: traits::PredicateObligation<'tcx>)
1552         debug!("register_predicate({:?})",
1554         self.inh.fulfillment_cx
1556             .register_predicate_obligation(self.infcx(), obligation);
// Convert an AST type and register that it is well-formed.
1559     pub fn to_ty(&self, ast_t: &hir::Ty) -> Ty<'tcx> {
1560         let t = ast_ty_to_ty(self, self, ast_t);
1561         self.register_wf_obligation(t, ast_t.span, traits::MiscObligation);
// Type previously recorded for an expression; a missing entry is a
// compiler bug (every expression must have been visited).
1565     pub fn expr_ty(&self, ex: &hir::Expr) -> Ty<'tcx> {
1566         match self.inh.tables.borrow().node_types.get(&ex.id) {
1569                 self.tcx().sess.bug(&format!("no type for expr in fcx {}",
1575     /// Apply `adjustment` to the type of `expr`
1576     pub fn adjust_expr_ty(&self,
1578                           adjustment: Option<&adjustment::AutoAdjustment<'tcx>>)
1581         let raw_ty = self.expr_ty(expr);
1582         let raw_ty = self.infcx().shallow_resolve(raw_ty);
1583         let resolve_ty = |ty: Ty<'tcx>| self.infcx().resolve_type_vars_if_possible(&ty);
// Overloaded-deref steps pull their types from the recorded method map.
1584         raw_ty.adjust(self.tcx(), expr.span, expr.id, adjustment, |method_call| {
1585             self.inh.tables.borrow().method_map.get(&method_call)
1586                                         .map(|method| resolve_ty(method.ty))
// Like `expr_ty` but for any node; tolerates a missing entry only if an
// error has already been reported in this fcx.
1590     pub fn node_ty(&self, id: ast::NodeId) -> Ty<'tcx> {
1591         match self.inh.tables.borrow().node_types.get(&id) {
1593             None if self.err_count_since_creation() != 0 => self.tcx().types.err,
1595                 self.tcx().sess.bug(
1596                     &format!("no type for node {}: {} in fcx {}",
1597                             id, self.tcx().map.node_to_string(id),
1603     pub fn item_substs(&self) -> Ref<NodeMap<ty::ItemSubsts<'tcx>>> {
1604         // NOTE: @jroesch this is a hack that appears to be fixed on nightly, will monitor if
1605         // it changes when we upgrade the snapshot compiler
1606         fn project_item_susbts<'a, 'tcx>(tables: &'a ty::Tables<'tcx>)
1607                                         -> &'a NodeMap<ty::ItemSubsts<'tcx>> {
1611         Ref::map(self.inh.tables.borrow(), project_item_susbts)
1614     pub fn opt_node_ty_substs<F>(&self,
1617               F: FnOnce(&ty::ItemSubsts<'tcx>),
1619         match self.inh.tables.borrow().item_substs.get(&id) {
// Thin wrappers over the infer module's relation constructors.
1625     pub fn mk_subty(&self,
1626                     a_is_expected: bool,
1630                     -> Result<(), TypeError<'tcx>> {
1631         infer::mk_subty(self.infcx(), a_is_expected, origin, sub, sup)
1634     pub fn mk_eqty(&self,
1635                    a_is_expected: bool,
1639                    -> Result<(), TypeError<'tcx>> {
1640         infer::mk_eqty(self.infcx(), a_is_expected, origin, sub, sup)
1643     pub fn mk_subr(&self,
1644                    origin: infer::SubregionOrigin<'tcx>,
1647         infer::mk_subr(self.infcx(), origin, sub, sup)
1650     pub fn type_error_message<M>(&self,
1653                                  actual_ty: Ty<'tcx>,
1654                                  err: Option<&TypeError<'tcx>>)
1655         where M: FnOnce(String) -> String,
1657         self.infcx().type_error_message(sp, mk_msg, actual_ty, err);
1660     pub fn type_error_struct<M>(&self,
1663                                 actual_ty: Ty<'tcx>,
1664                                 err: Option<&TypeError<'tcx>>)
1665                                 -> DiagnosticBuilder<'tcx>
1666         where M: FnOnce(String) -> String,
1668         self.infcx().type_error_struct(sp, mk_msg, actual_ty, err)
1671     /// Registers an obligation for checking later, during regionck, that the type `ty` must
1672     /// outlive the region `r`.
1673     pub fn register_region_obligation(&self,
1676                                       cause: traits::ObligationCause<'tcx>)
1678         let mut fulfillment_cx = self.inh.fulfillment_cx.borrow_mut();
1679         fulfillment_cx.register_region_obligation(ty, region, cause);
1682     /// Registers an obligation for checking later, during regionck, that the type `ty` must
1683     /// outlive the region `r`.
1684     pub fn register_wf_obligation(&self,
1687                                   code: traits::ObligationCauseCode<'tcx>)
1689         // WF obligations never themselves fail, so no real need to give a detailed cause:
1690         let cause = traits::ObligationCause::new(span, self.body_id, code);
1691         self.register_predicate(traits::Obligation::new(cause, ty::Predicate::WellFormed(ty)));
1694     pub fn register_old_wf_obligation(&self,
1697                                       code: traits::ObligationCauseCode<'tcx>)
1699         // Registers an "old-style" WF obligation that uses the
1700         // implicator code. This is basically a buggy version of
1701         // `register_wf_obligation` that is being kept around
1702         // temporarily just to help with phasing in the newer rules.
1704         // FIXME(#27579) all uses of this should be migrated to register_wf_obligation eventually
1705         let cause = traits::ObligationCause::new(span, self.body_id, code);
1706         self.register_region_obligation(ty, ty::ReEmpty, cause);
1709     /// Registers obligations that all types appearing in `substs` are well-formed.
1710     pub fn add_wf_bounds(&self, substs: &Substs<'tcx>, expr: &hir::Expr)
1712         for &ty in &substs.types {
1713             self.register_wf_obligation(ty, expr.span, traits::MiscObligation);
1717     /// Given a fully substituted set of bounds (`generic_bounds`), and the values with which each
1718     /// type/region parameter was instantiated (`substs`), creates and registers suitable
1719     /// trait/region obligations.
1721     /// For example, if there is a function:
1724     ///    fn foo<'a,T:'a>(...)
1727     /// and a reference:
1733     /// Then we will create a fresh region variable `'$0` and a fresh type variable `$1` for `'a`
1734     /// and `T`. This routine will add a region obligation `$1:'$0` and register it locally.
1735     pub fn add_obligations_for_parameters(&self,
1736                                           cause: traits::ObligationCause<'tcx>,
1737                                           predicates: &ty::InstantiatedPredicates<'tcx>)
1739         assert!(!predicates.has_escaping_regions());
1741         debug!("add_obligations_for_parameters(predicates={:?})",
1744         for obligation in traits::predicates_for_generics(cause, predicates) {
1745             self.register_predicate(obligation);
1749     // FIXME(arielb1): use this instead of field.ty everywhere
1750     // Only for fields! Returns <none> for methods>
1751     // Indifferent to privacy flags
1752     pub fn field_ty(&self,
1754                     field: ty::FieldDef<'tcx>,
1755                     substs: &Substs<'tcx>)
1758         self.normalize_associated_types_in(span,
1759                                            &field.ty(self.tcx(), substs))
// Run every deferred cast check (casts queued because their types were not
// yet known when first encountered).
1762     fn check_casts(&self) {
1763         let mut deferred_cast_checks = self.inh.deferred_cast_checks.borrow_mut();
1764         for cast in deferred_cast_checks.drain(..) {
1769     /// Apply "fallbacks" to some types
1770     /// ! gets replaced with (), unconstrained ints with i32, and unconstrained floats with f64.
1771     fn default_type_parameters(&self) {
1772         use middle::ty::error::UnconstrainedNumeric::Neither;
1773         use middle::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1774         for ty in &self.infcx().unsolved_variables() {
1775             let resolved = self.infcx().resolve_type_vars_if_possible(ty);
// Diverging type variables (from `!`-typed expressions) fall back to `()`.
1776             if self.infcx().type_var_diverges(resolved) {
1777                 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
1779                 match self.infcx().type_is_unconstrained_numeric(resolved) {
1780                     UnconstrainedInt => {
1781                         demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32)
1783                     UnconstrainedFloat => {
1784                         demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64)
// Dispatch between the legacy fallback and the feature-gated
// `default_type_parameter_fallback` algorithm.
1792     fn select_all_obligations_and_apply_defaults(&self) {
1793         if self.tcx().sess.features.borrow().default_type_parameter_fallback {
1794             self.new_select_all_obligations_and_apply_defaults();
1796             self.old_select_all_obligations_and_apply_defaults();
1800     // Implements old type inference fallback algorithm
1801     fn old_select_all_obligations_and_apply_defaults(&self) {
1802         self.select_obligations_where_possible();
1803         self.default_type_parameters();
1804         self.select_obligations_where_possible();
1807     fn new_select_all_obligations_and_apply_defaults(&self) {
1808         use middle::ty::error::UnconstrainedNumeric::Neither;
1809         use middle::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1811         // For the time being this errs on the side of being memory wasteful but provides better
1813         // let type_variables = self.infcx().type_variables.clone();
1815         // There is a possibility that this algorithm will have to run an arbitrary number of times
1816         // to terminate so we bound it by the compiler's recursion limit.
1817         for _ in 0..self.tcx().sess.recursion_limit.get() {
1818             // First we try to solve all obligations, it is possible that the last iteration
1819             // has made it possible to make more progress.
1820             self.select_obligations_where_possible();
1822             let mut conflicts = Vec::new();
1824             // Collect all unsolved type, integral and floating point variables.
1825             let unsolved_variables = self.inh.infcx.unsolved_variables();
1827             // We must collect the defaults *before* we do any unification. Because we have
1828             // directly attached defaults to the type variables any unification that occurs
1829             // will erase defaults causing conflicting defaults to be completely ignored.
1830             let default_map: FnvHashMap<_, _> =
1833                     .filter_map(|t| self.infcx().default(t).map(|d| (t, d)))
1836             let mut unbound_tyvars = HashSet::new();
1838             debug!("select_all_obligations_and_apply_defaults: defaults={:?}", default_map);
1840             // We loop over the unsolved variables, resolving them and if they are
1841             // an unconstrained numeric type we add them to the set of unbound
1842             // variables. We do this so we only apply literal fallback to type
1843             // variables without defaults.
1844             for ty in &unsolved_variables {
1845                 let resolved = self.infcx().resolve_type_vars_if_possible(ty);
1846                 if self.infcx().type_var_diverges(resolved) {
1847                     demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
1849                     match self.infcx().type_is_unconstrained_numeric(resolved) {
1850                         UnconstrainedInt | UnconstrainedFloat => {
1851                             unbound_tyvars.insert(resolved);
1858             // We now remove any numeric types that also have defaults, and instead insert
1859             // the type variable with a defined fallback.
1860             for ty in &unsolved_variables {
1861                 if let Some(_default) = default_map.get(ty) {
1862                     let resolved = self.infcx().resolve_type_vars_if_possible(ty);
1864                     debug!("select_all_obligations_and_apply_defaults: ty: {:?} with default: {:?}",
1867                     match resolved.sty {
1868                         ty::TyInfer(ty::TyVar(_)) => {
1869                             unbound_tyvars.insert(ty);
// Prefer the declared default over numeric-literal fallback for this var.
1872                         ty::TyInfer(ty::IntVar(_)) | ty::TyInfer(ty::FloatVar(_)) => {
1873                             unbound_tyvars.insert(ty);
1874                             if unbound_tyvars.contains(resolved) {
1875                                 unbound_tyvars.remove(resolved);
1884             // If there are no more fallbacks to apply at this point we have applied all possible
1885             // defaults and type inference will proceed as normal.
1886             if unbound_tyvars.is_empty() {
1890             // Finally we go through each of the unbound type variables and unify them with
1891             // the proper fallback, reporting a conflicting default error if any of the
1892             // unifications fail. We know it must be a conflicting default because the
1893             // variable would only be in `unbound_tyvars` and have a concrete value if
1894             // it had been solved by previously applying a default.
1896             // We wrap this in a transaction for error reporting, if we detect a conflict
1897             // we will rollback the inference context to its prior state so we can probe
1898             // for conflicts and correctly report them.
1901             let _ = self.infcx().commit_if_ok(|_: &infer::CombinedSnapshot| {
1902                 for ty in &unbound_tyvars {
1903                     if self.infcx().type_var_diverges(ty) {
1904                         demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
1906                         match self.infcx().type_is_unconstrained_numeric(ty) {
1907                             UnconstrainedInt => {
1908                                 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32)
1910                             UnconstrainedFloat => {
1911                                 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64)
1914                                 if let Some(default) = default_map.get(ty) {
1915                                     let default = default.clone();
1916                                     match infer::mk_eqty(self.infcx(), false,
1917                                                          TypeOrigin::Misc(default.origin_span),
1921                                             conflicts.push((*ty, default));
1930                 // If there are conflicts we rollback, otherwise commit
1931                 if conflicts.len() > 0 {
1938             if conflicts.len() > 0 {
1939                 // Loop through each conflicting default, figuring out the default that caused
1940                 // a unification failure and then report an error for each.
1941                 for (conflict, default) in conflicts {
1942                     let conflicting_default =
1943                         self.find_conflicting_default(&unbound_tyvars, &default_map, conflict)
1944                             .unwrap_or(type_variable::Default {
1945                                 ty: self.infcx().next_ty_var(),
1946                                 origin_span: codemap::DUMMY_SP,
1947                                 def_id: self.tcx().map.local_def_id(0) // what do I put here?
1950                     // This is to ensure that we eliminate any non-determinism from the error
1951                     // reporting by fixing an order, it doesn't matter what order we choose
1952                     // just that it is consistent.
1953                     let (first_default, second_default) =
1954                         if default.def_id < conflicting_default.def_id {
1955                             (default, conflicting_default)
1957                             (conflicting_default, default)
1961                     self.infcx().report_conflicting_default_types(
1962                         first_default.origin_span,
1969         self.select_obligations_where_possible();
1972     // For use in error handling related to default type parameter fallback. We explicitly
1973     // apply the default that caused conflict first to a local version of the type variable
1974     // table then apply defaults until we find a conflict. That default must be the one
1975     // that caused conflict earlier.
1976     fn find_conflicting_default(&self,
1977                                 unbound_vars: &HashSet<Ty<'tcx>>,
1978                                 default_map: &FnvHashMap<&Ty<'tcx>, type_variable::Default<'tcx>>,
1980                                 -> Option<type_variable::Default<'tcx>> {
1981         use middle::ty::error::UnconstrainedNumeric::Neither;
1982         use middle::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat};
1984         // Ensure that we apply the conflicting default first
1985         let mut unbound_tyvars = Vec::with_capacity(unbound_vars.len() + 1);
1986         unbound_tyvars.push(conflict);
1987         unbound_tyvars.extend(unbound_vars.iter());
1989         let mut result = None;
1990         // We run the same code as above applying defaults in order, this time when
1991         // we find the conflict we just return it for error reporting above.
1993         // We also run this inside a snapshot that never commits so we can do error
1994         // reporting for more than one conflict.
1995         for ty in &unbound_tyvars {
1996             if self.infcx().type_var_diverges(ty) {
1997                 demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil());
1999                 match self.infcx().type_is_unconstrained_numeric(ty) {
2000                     UnconstrainedInt => {
2001                         demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32)
2003                     UnconstrainedFloat => {
2004                         demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64)
2007                         if let Some(default) = default_map.get(ty) {
2008                             let default = default.clone();
2009                             match infer::mk_eqty(self.infcx(), false,
2010                                                  TypeOrigin::Misc(default.origin_span),
2014                                     result = Some(default);
// Final pass at the end of a body: apply defaults, then report any
// obligations that still cannot be proven.
2026     fn select_all_obligations_or_error(&self) {
2027         debug!("select_all_obligations_or_error");
2029         // upvar inference should have ensured that all deferred call
2030         // resolutions are handled by now.
2031         assert!(self.inh.deferred_call_resolutions.borrow().is_empty());
2033         self.select_all_obligations_and_apply_defaults();
2035         let mut fulfillment_cx = self.inh.fulfillment_cx.borrow_mut();
2036         match fulfillment_cx.select_all_or_error(self.infcx()) {
2038             Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }
2042     /// Select as many obligations as we can at present.
2043     fn select_obligations_where_possible(&self) {
2045         self.inh.fulfillment_cx
2047             .select_where_possible(self.infcx())
2050             Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }
// Whether a private item is visible from this body; cross-crate private
// items are never visible.
2054     fn private_item_is_visible(&self, def_id: DefId) -> bool {
2055         match self.tcx().map.as_local_node_id(def_id) {
2056             Some(node_id) => self.tcx().map.private_item_is_visible_from(node_id, self.body_id),
2057             None => false, // Private items from other crates are never visible
// `RegionScope` implementation for `FnCtxt`: inside a function body, elided
// and defaulted lifetimes become fresh inference variables rather than fixed
// defaults, so regionck can solve for the correct region.
// NOTE(review): elided listing — closing braces are missing from this view.
2062 impl<'a, 'tcx> RegionScope for FnCtxt<'a, 'tcx> {
2063     fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
2064         Some(self.base_object_lifetime_default(span))
2067     fn base_object_lifetime_default(&self, span: Span) -> ty::Region {
2068         // RFC #599 specifies that object lifetime defaults take
2069         // precedence over other defaults. But within a fn body we
2070         // don't have a *default* region, rather we use inference to
2071         // find the *correct* region, which is strictly more general
2072         // (and anyway, within a fn body the right region may not even
2073         // be something the user can write explicitly, since it might
2074         // be some expression).
2075         self.infcx().next_region_var(infer::MiscVariable(span))
// Produce `count` fresh region inference variables for elided lifetimes.
2078     fn anon_regions(&self, span: Span, count: usize)
2079                     -> Result<Vec<ty::Region>, Option<Vec<ElisionFailureInfo>>> {
2080         Ok((0..count).map(|_| {
2081             self.infcx().next_region_var(infer::MiscVariable(span))
2086 /// Whether `autoderef` requires types to resolve.
2087 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
2088 pub enum UnresolvedTypeAction {
// NOTE(review): the variant identifiers themselves (original lines 2091 and
// 2094) are elided from this listing; the doc comments below describe them.
// From usage in `autoderef` they appear to be `Error` and `Ignore` — confirm
// against the full source.
2089     /// Produce an error and return `TyError` whenever a type cannot
2090     /// be resolved (i.e. it is `TyInfer`).
2092     /// Go on without emitting any errors, and return the unresolved
2093     /// type. Useful for probing, e.g. in coercions.
2097 /// Executes an autoderef loop for the type `t`. At each step, invokes `should_stop` to decide
2098 /// whether to terminate the loop. Returns the final type and number of derefs that it performed.
2100 /// Note: this method does not modify the adjustments table. The caller is responsible for
2101 /// inserting an AutoAdjustment record into the `fcx` using one of the suitable methods.
// NOTE(review): elided listing — interior lines (some parameters, match arms,
// closing braces) are missing from this view; visible code kept byte-identical.
2102 pub fn autoderef<'a, 'b, 'tcx, E, I, T, F>(fcx: &FnCtxt<'a, 'tcx>,
2106                                            unresolved_type_action: UnresolvedTypeAction,
2107                                            mut lvalue_pref: LvaluePreference,
2109                                            -> (Ty<'tcx>, usize, Option<T>)
2110     // FIXME(eddyb) use copyable iterators when that becomes ergonomic.
2112           I: IntoIterator<Item=&'b hir::Expr>,
2113           F: FnMut(Ty<'tcx>, usize) -> Option<T>,
2115     debug!("autoderef(base_ty={:?}, lvalue_pref={:?})",
2116            base_ty, lvalue_pref);
2118     let mut t = base_ty;
// The deref chain is bounded by the compiler's recursion limit to guarantee
// termination (E0055 below if it is reached).
2119     for autoderefs in 0..fcx.tcx().sess.recursion_limit.get() {
2120         let resolved_t = match unresolved_type_action {
2121             UnresolvedTypeAction::Error => {
2122                 structurally_resolved_type(fcx, sp, t)
2124             UnresolvedTypeAction::Ignore => {
2125                 // We can continue even when the type cannot be resolved
2126                 // (i.e. it is an inference variable) because `Ty::builtin_deref`
2127                 // and `try_overloaded_deref` both simply return `None`
2128                 // in such a case without producing spurious errors.
2129                 fcx.infcx().resolve_type_vars_if_possible(&t)
2132         if resolved_t.references_error() {
2133             return (resolved_t, autoderefs, None);
// Let the caller decide whether this step's type is the one it wants.
2136         match should_stop(resolved_t, autoderefs) {
2137             Some(x) => return (resolved_t, autoderefs, Some(x)),
2141         // Otherwise, deref if type is derefable:
2143         // Super subtle: it might seem as though we should
2144         // pass `opt_expr` to `try_overloaded_deref`, so that
2145         // the (implicit) autoref of using an overloaded deref
2146         // would get added to the adjustment table. However we
2147         // do not do that, because it's kind of a
2148         // "meta-adjustment" -- instead, we just leave it
2149         // unrecorded and know that there "will be" an
2150         // autoref. regionck and other bits of the code base,
2151         // when they encounter an overloaded autoderef, have
2152         // to do some reconstructive surgery. This is a pretty
2153         // complex mess that is begging for a proper MIR.
2154         let mt = if let Some(mt) = resolved_t.builtin_deref(false, lvalue_pref) {
2156         } else if let Some(method) = try_overloaded_deref(fcx, sp, None,
2157                                                          resolved_t, lvalue_pref) {
// Record the Deref/DerefMut method chosen for each supplied expression.
2158             for expr in maybe_exprs() {
2159                 let method_call = MethodCall::autoderef(expr.id, autoderefs as u32);
2160                 fcx.inh.tables.borrow_mut().method_map.insert(method_call, method);
2162             make_overloaded_lvalue_return_type(fcx.tcx(), method)
2164             return (resolved_t, autoderefs, None);
// Once we pass through an immutable reference, a mutable lvalue is no
// longer attainable, so drop the preference.
2168         if mt.mutbl == hir::MutImmutable {
2169             lvalue_pref = NoPreference;
2173     // We've reached the recursion limit, error gracefully.
2174     span_err!(fcx.tcx().sess, sp, E0055,
2175               "reached the recursion limit while auto-dereferencing {:?}",
2177     (fcx.tcx().types.err, 0, None)
// Attempts to resolve an overloaded deref on `base_ty` via the `Deref` /
// `DerefMut` lang-item traits, returning the selected method if one applies.
// NOTE(review): elided listing — some parameters, match arms, and the final
// return are missing from this view; visible code kept byte-identical.
2180 fn try_overloaded_deref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2182                                   base_expr: Option<&hir::Expr>,
2184                                   lvalue_pref: LvaluePreference)
2185                                   -> Option<MethodCallee<'tcx>>
2187     // Try DerefMut first, if preferred.
2188     let method = match (lvalue_pref, fcx.tcx().lang_items.deref_mut_trait()) {
2189         (PreferMutLvalue, Some(trait_did)) => {
2190             method::lookup_in_trait(fcx, span, base_expr,
2191                                     token::intern("deref_mut"), trait_did,
2197     // Otherwise, fall back to Deref.
2198     let method = match (method, fcx.tcx().lang_items.deref_trait()) {
2199         (None, Some(trait_did)) => {
2200             method::lookup_in_trait(fcx, span, base_expr,
2201                                     token::intern("deref"), trait_did,
// Keep a DerefMut hit as-is; only retry with Deref when nothing matched.
2204         (method, _) => method
2210 /// For the overloaded lvalue expressions (`*x`, `x[3]`), the trait returns a type of `&T`, but the
2211 /// actual type we assign to the *expression* is `T`. So this function just peels off the return
2212 /// type by one layer to yield `T`.
2213 fn make_overloaded_lvalue_return_type<'tcx>(tcx: &TyCtxt<'tcx>,
2214                                             method: MethodCallee<'tcx>)
2215                                             -> ty::TypeAndMut<'tcx>
2217     // extract method return type, which will be &T;
2218     // all LB regions should have been instantiated during method lookup
2219     let ret_ty = method.ty.fn_ret();
// First unwrap: no late-bound regions remain (guaranteed by method lookup);
// second unwrap: the return type is not diverging. Both would be compiler
// bugs if violated.
2220     let ret_ty = tcx.no_late_bound_regions(&ret_ty).unwrap().unwrap();
2222     // method returns &T, but the type as visible to user is T, so deref
2223     ret_ty.builtin_deref(true, NoPreference).unwrap()
// Resolves an index expression `base_expr[idx]` by autoderef'ing the base and,
// at each step, trying `try_index_step` (builtin or overloaded indexing).
// Returns the index and element types on success.
// NOTE(review): elided listing — some parameters, intermediate bindings, and
// closing braces are missing from this view; visible code kept byte-identical.
2226 fn lookup_indexing<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2228                              base_expr: &'tcx hir::Expr,
2231                              lvalue_pref: LvaluePreference)
2232                              -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2234     // FIXME(#18741) -- this is almost but not quite the same as the
2235     // autoderef that normal method probing does. They could likely be
// Walk the deref chain, attempting an index step at each resulting type.
2238     let (ty, autoderefs, final_mt) = autoderef(fcx,
2242                                                UnresolvedTypeAction::Error,
2245             try_index_step(fcx, MethodCall::expr(expr.id), expr, base_expr,
2246                            adj_ty, idx, false, lvalue_pref, idx_ty)
2249     if final_mt.is_some() {
2253     // After we have fully autoderef'd, if the resulting type is [T; n], then
2254     // do a final unsized coercion to yield [T].
2255     if let ty::TyArray(element_ty, _) = ty.sty {
2256         let adjusted_ty = fcx.tcx().mk_slice(element_ty);
2257         try_index_step(fcx, MethodCall::expr(expr.id), expr, base_expr,
2258                        adjusted_ty, autoderefs, true, lvalue_pref, idx_ty)
2264 /// To type-check `base_expr[index_expr]`, we progressively autoderef (and otherwise adjust)
2265 /// `base_expr`, looking for a type which either supports builtin indexing or overloaded indexing.
2266 /// This loop implements one step in that search; the autoderef loop is implemented by
2267 /// `lookup_indexing`.
2268 fn try_index_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2269 method_call: MethodCall,
2271 base_expr: &'tcx hir::Expr,
2272 adjusted_ty: Ty<'tcx>,
2275 lvalue_pref: LvaluePreference,
2277 -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
2279 let tcx = fcx.tcx();
2280 debug!("try_index_step(expr={:?}, base_expr.id={:?}, adjusted_ty={:?}, \
2281 autoderefs={}, unsize={}, index_ty={:?})",
// Fresh inference variable standing in for the index type; it is fed to the
// trait lookups below as the `Index`/`IndexMut` input type and returned to
// the caller as the "index type" half of the result pair.
2289 let input_ty = fcx.infcx().next_ty_var();
2291 // First, try built-in indexing.
// Built-in indexing applies when the adjusted type has an intrinsic element
// type (`builtin_index`) and the index is `usize` or a still-unresolved
// integral inference variable.
2292 match (adjusted_ty.builtin_index(), &index_ty.sty) {
2293 (Some(ty), &ty::TyUint(ast::UintTy::Us)) | (Some(ty), &ty::TyInfer(ty::IntVar(_))) => {
2294 debug!("try_index_step: success, using built-in indexing");
2295 // If we had `[T; N]`, we should've caught it before unsizing to `[T]`.
2297 fcx.write_autoderef_adjustment(base_expr.id, autoderefs);
2298 return Some((tcx.types.usize, ty));
2303 // Try `IndexMut` first, if preferred.
2304 let method = match (lvalue_pref, tcx.lang_items.index_mut_trait()) {
2305 (PreferMutLvalue, Some(trait_did)) => {
2306 method::lookup_in_trait_adjusted(fcx,
2309 token::intern("index_mut"),
2314 Some(vec![input_ty]))
2319 // Otherwise, fall back to `Index`.
2320 let method = match (method, tcx.lang_items.index_trait()) {
2321 (None, Some(trait_did)) => {
2322 method::lookup_in_trait_adjusted(fcx,
2325 token::intern("index"),
2330 Some(vec![input_ty]))
2332 (method, _) => method,
2335 // If some lookup succeeds, write callee into table and extract index/element
2336 // type from the method signature.
2337 // If some lookup succeeded, install method in table
2338 method.map(|method| {
2339 debug!("try_index_step: success, using overloaded indexing");
2340 fcx.inh.tables.borrow_mut().method_map.insert(method_call, method)
;
2341 (input_ty, make_overloaded_lvalue_return_type(fcx.tcx(), method).ty)
// Checks the arguments of a resolved method call and returns the call's
// output type. If method lookup already produced an error type, the
// arguments are still checked (against all-error placeholder types from
// `err_args`) so inner expressions get types, and an error output is
// returned.
2345 fn check_method_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2347 method_fn_ty: Ty<'tcx>,
2348 callee_expr: &'tcx hir::Expr,
2349 args_no_rcvr: &'tcx [P<hir::Expr>],
2350 tuple_arguments: TupleArgumentsFlag,
2351 expected: Expectation<'tcx>)
2352 -> ty::FnOutput<'tcx> {
2353 if method_fn_ty.references_error() {
2354 let err_inputs = err_args(fcx.tcx(), args_no_rcvr.len());
2356 let err_inputs = match tuple_arguments {
2357 DontTupleArguments => err_inputs,
2358 TupleArguments => vec![fcx.tcx().mk_tup(err_inputs)],
2361 check_argument_types(fcx,
2368 ty::FnConverging(fcx.tcx().types.err)
2370 match method_fn_ty.sty {
2371 ty::TyFnDef(_, _, ref fty) => {
2372 // HACK(eddyb) ignore self in the definition (see above).
// `inputs[1..]` skips the `self` receiver, which was checked separately.
2373 let expected_arg_tys = expected_types_for_fn_args(fcx,
2377 &fty.sig.0.inputs[1..]);
2378 check_argument_types(fcx,
2380 &fty.sig.0.inputs[1..],
2381 &expected_arg_tys[..],
// A resolved method whose type is not a bare fn is a compiler bug.
2388 fcx.tcx().sess.span_bug(callee_expr.span,
2389 "method without bare fn type");
2395 /// Generic function that factors out common logic from function calls, method calls and overloaded
/// operators. Validates arity (emitting E0057/E0059/E0060/E0061 and falling
/// back to error types on mismatch), then checks each argument against its
/// formal/expected type, tracking divergence for the UNREACHABLE_CODE lint,
/// and finally applies C-style varargs restrictions for variadic callees.
2397 fn check_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2399 fn_inputs: &[Ty<'tcx>],
2400 expected_arg_tys: &[Ty<'tcx>],
2401 args: &'tcx [P<hir::Expr>],
2403 tuple_arguments: TupleArgumentsFlag) {
2404 let tcx = fcx.ccx.tcx;
2406 // Grab the argument types, supplying fresh type variables
2407 // if the wrong number of arguments were supplied
2408 let supplied_arg_count = if tuple_arguments == DontTupleArguments {
2414 // All the input types from the fn signature must outlive the call
2415 // so as to validate implied bounds.
2416 for &fn_input_ty in fn_inputs {
2417 fcx.register_wf_obligation(fn_input_ty, sp, traits::MiscObligation);
2420 let mut expected_arg_tys = expected_arg_tys;
2421 let expected_arg_count = fn_inputs.len();
// For `Fn`-trait style calls the single formal input is a tuple whose
// elements are the per-argument formal types; unpack it here.
2422 let formal_tys = if tuple_arguments == TupleArguments {
2423 let tuple_type = structurally_resolved_type(fcx, sp, fn_inputs[0]);
2424 match tuple_type.sty {
2425 ty::TyTuple(ref arg_types) => {
2426 if arg_types.len() != args.len() {
2427 span_err!(tcx.sess, sp, E0057,
2428 "this function takes {} parameter{} but {} parameter{} supplied",
2430 if arg_types.len() == 1 {""} else {"s"},
2432 if args.len() == 1 {" was"} else {"s were"});
2433 expected_arg_tys = &[];
2434 err_args(fcx.tcx(), args.len())
2436 expected_arg_tys = match expected_arg_tys.get(0) {
2437 Some(&ty) => match ty.sty {
2438 ty::TyTuple(ref tys) => &tys,
2443 (*arg_types).clone()
2447 span_err!(tcx.sess, sp, E0059,
2448 "cannot use call notation; the first type parameter \
2449 for the function trait is neither a tuple nor unit");
2450 expected_arg_tys = &[];
2451 err_args(fcx.tcx(), args.len())
2454 } else if expected_arg_count == supplied_arg_count {
// Variadic (C FFI) functions accept *at least* `expected_arg_count` args.
2456 } else if variadic {
2457 if supplied_arg_count >= expected_arg_count {
2460 span_err!(tcx.sess, sp, E0060,
2461 "this function takes at least {} parameter{} \
2462 but {} parameter{} supplied",
2464 if expected_arg_count == 1 {""} else {"s"},
2466 if supplied_arg_count == 1 {" was"} else {"s were"});
2467 expected_arg_tys = &[];
2468 err_args(fcx.tcx(), supplied_arg_count)
2471 span_err!(tcx.sess, sp, E0061,
2472 "this function takes {} parameter{} but {} parameter{} supplied",
2474 if expected_arg_count == 1 {""} else {"s"},
2476 if supplied_arg_count == 1 {" was"} else {"s were"});
2477 expected_arg_tys = &[];
2478 err_args(fcx.tcx(), supplied_arg_count)
2481 debug!("check_argument_types: formal_tys={:?}",
2482 formal_tys.iter().map(|t| fcx.infcx().ty_to_string(*t)).collect::<Vec<String>>());
2484 // Check the arguments.
2485 // We do this in a pretty awful way: first we typecheck any arguments
2486 // that are not anonymous functions, then we typecheck the anonymous
2487 // functions. This is so that we have more information about the types
2488 // of arguments when we typecheck the functions. This isn't really the
2489 // right way to do this.
// Pass 1 (`false`): non-closure arguments; pass 2 (`true`): closures.
2490 let xs = [false, true];
2491 let mut any_diverges = false; // has any of the arguments diverged?
2492 let mut warned = false; // have we already warned about unreachable code?
2493 for check_blocks in &xs {
2494 let check_blocks = *check_blocks;
2495 debug!("check_blocks={}", check_blocks);
2497 // More awful hacks: before we check argument types, try to do
2498 // an "opportunistic" vtable resolution of any trait bounds on
2499 // the call. This helps coercions.
2501 fcx.select_obligations_where_possible();
2504 // For variadic functions, we don't have a declared type for all of
2505 // the arguments hence we only do our usual type checking with
2506 // the arguments who's types we do know.
2507 let t = if variadic {
2509 } else if tuple_arguments == TupleArguments {
2514 for (i, arg) in args.iter().take(t).enumerate() {
// Lint: an argument after one that diverged can never execute.
2515 if any_diverges && !warned {
2519 .add_lint(lint::builtin::UNREACHABLE_CODE,
2522 "unreachable expression".to_string());
2525 let is_block = match arg.node {
2526 hir::ExprClosure(..) => true,
2530 if is_block == check_blocks {
2531 debug!("checking the argument");
2532 let formal_ty = formal_tys[i];
2534 // The special-cased logic below has three functions:
2535 // 1. Provide as good of an expected type as possible.
2536 let expected = expected_arg_tys.get(i).map(|&ty| {
2537 Expectation::rvalue_hint(fcx.tcx(), ty)
2540 check_expr_with_expectation(fcx, &arg,
2541 expected.unwrap_or(ExpectHasType(formal_ty)));
2542 // 2. Coerce to the most detailed type that could be coerced
2543 // to, which is `expected_ty` if `rvalue_hint` returns an
2544 // `ExpectHasType(expected_ty)`, or the `formal_ty` otherwise.
2545 let coerce_ty = expected.and_then(|e| e.only_has_type(fcx));
2546 demand::coerce(fcx, arg.span, coerce_ty.unwrap_or(formal_ty), &arg);
2548 // 3. Relate the expected type and the formal one,
2549 // if the expected type was used for the coercion.
2550 coerce_ty.map(|ty| demand::suptype(fcx, arg.span, formal_ty, ty));
2553 if let Some(&arg_ty) = fcx.inh.tables.borrow().node_types.get(&arg.id) {
2554 any_diverges = any_diverges || fcx.infcx().type_var_diverges(arg_ty);
// If every argument was reachable but the whole call diverges, warn on
// the call itself rather than on an argument.
2557 if any_diverges && !warned {
2558 let parent = fcx.ccx.tcx.map.get_parent_node(args[0].id);
2562 .add_lint(lint::builtin::UNREACHABLE_CODE,
2565 "unreachable call".to_string());
2571 // We also need to make sure we at least write the ty of the other
2572 // arguments which we skipped above.
2574 for arg in args.iter().skip(expected_arg_count) {
2575 check_expr(fcx, &arg);
2577 // There are a few types which get autopromoted when passed via varargs
2578 // in C but we just error out instead and require explicit casts.
2579 let arg_ty = structurally_resolved_type(fcx, arg.span,
// C default-argument promotion would widen these; Rust requires the
// caller to cast explicitly instead.
2582 ty::TyFloat(ast::FloatTy::F32) => {
2583 fcx.type_error_message(arg.span,
2585 format!("can't pass an {} to variadic \
2586 function, cast to c_double", t)
2589 ty::TyInt(ast::IntTy::I8) | ty::TyInt(ast::IntTy::I16) | ty::TyBool => {
2590 fcx.type_error_message(arg.span, |t| {
2591 format!("can't pass {} to variadic \
2592 function, cast to c_int",
2596 ty::TyUint(ast::UintTy::U8) | ty::TyUint(ast::UintTy::U16) => {
2597 fcx.type_error_message(arg.span, |t| {
2598 format!("can't pass {} to variadic \
2599 function, cast to c_uint",
// Produces `len` copies of the error type -- placeholder formal argument
// types used after an arity mismatch or an erroneous callee, so argument
// expressions can still be checked.
2609 // FIXME(#17596) Ty<'tcx> is incorrectly invariant w.r.t 'tcx.
2610 fn err_args<'tcx>(tcx: &TyCtxt<'tcx>, len: usize) -> Vec<Ty<'tcx>> {
2611 (0..len).map(|_| tcx.types.err).collect()
// Records the type of a call expression from the callee's output type.
// A diverging callee (`-> !`) yields a fresh diverging type variable so the
// call site can unify with any surrounding context.
2614 fn write_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2615 call_expr: &hir::Expr,
2616 output: ty::FnOutput<'tcx>) {
2617 fcx.write_ty(call_expr.id, match output {
2618 ty::FnConverging(output_ty) => output_ty,
2619 ty::FnDiverging => fcx.infcx().next_diverging_ty_var()
2623 // AST fragment checking
// Computes the type of a literal expression. Suffixed numeric literals map
// directly to their machine type; unsuffixed ones consult the expectation
// and otherwise fall back to a fresh int/float inference variable.
2624 fn check_lit<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2626 expected: Expectation<'tcx>)
2629 let tcx = fcx.ccx.tcx;
2632 ast::LitKind::Str(..) => tcx.mk_static_str(),
// Byte-string literals are `&'static [u8; N]`.
2633 ast::LitKind::ByteStr(ref v) => {
2634 tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic),
2635 tcx.mk_array(tcx.types.u8, v.len()))
2637 ast::LitKind::Byte(_) => tcx.types.u8,
2638 ast::LitKind::Char(_) => tcx.types.char,
2639 ast::LitKind::Int(_, ast::LitIntType::Signed(t)) => tcx.mk_mach_int(t),
2640 ast::LitKind::Int(_, ast::LitIntType::Unsigned(t)) => tcx.mk_mach_uint(t),
2641 ast::LitKind::Int(_, ast::LitIntType::Unsuffixed) => {
// Adopt the expected type when it is integral; a char expectation hints
// `u8`, and raw-pointer / fn expectations hint `usize` (pointer-sized).
2642 let opt_ty = expected.to_option(fcx).and_then(|ty| {
2644 ty::TyInt(_) | ty::TyUint(_) => Some(ty),
2645 ty::TyChar => Some(tcx.types.u8),
2646 ty::TyRawPtr(..) => Some(tcx.types.usize),
2647 ty::TyFnDef(..) | ty::TyFnPtr(_) => Some(tcx.types.usize),
// No usable expectation: defer to integer-literal inference.
2651 opt_ty.unwrap_or_else(
2652 || tcx.mk_int_var(fcx.infcx().next_int_var_id()))
2654 ast::LitKind::Float(_, t) => tcx.mk_mach_float(t),
2655 ast::LitKind::FloatUnsuffixed(_) => {
2656 let opt_ty = expected.to_option(fcx).and_then(|ty| {
2658 ty::TyFloat(_) => Some(ty),
2662 opt_ty.unwrap_or_else(
2663 || tcx.mk_float_var(fcx.infcx().next_float_var_id()))
2665 ast::LitKind::Bool(_) => tcx.types.bool
// Checks `expr` with `expected` as a hint, then demands that its type is
// *exactly equal* to `expected` (no subtyping, no coercion).
2669 fn check_expr_eq_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2670 expr: &'tcx hir::Expr,
2671 expected: Ty<'tcx>) {
2672 check_expr_with_hint(fcx, expr, expected);
2673 demand::eqtype(fcx, expr.span, expected, fcx.expr_ty(expr));
// Checks `expr` with `expected` as a hint, then demands that its type is a
// *subtype* of `expected` (weaker than eqtype, stronger than coercion).
2676 pub fn check_expr_has_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2677 expr: &'tcx hir::Expr,
2678 expected: Ty<'tcx>) {
2679 check_expr_with_hint(fcx, expr, expected);
2680 demand::suptype(fcx, expr.span, expected, fcx.expr_ty(expr));
// Checks `expr` with `expected` as a hint, then demands that its type can be
// *coerced* to `expected` (the most permissive of the three demand forms,
// allowing e.g. autoref and unsizing adjustments).
2683 fn check_expr_coercable_to_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2684 expr: &'tcx hir::Expr,
2685 expected: Ty<'tcx>) {
2686 check_expr_with_hint(fcx, expr, expected);
2687 demand::coerce(fcx, expr.span, expected, expr);
// Convenience wrapper: checks `expr` with a plain `ExpectHasType` hint.
2690 fn check_expr_with_hint<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, expr: &'tcx hir::Expr,
2691 expected: Ty<'tcx>) {
2692 check_expr_with_expectation(fcx, expr, ExpectHasType(expected))
// Convenience wrapper: checks `expr` with the given expectation and no
// lvalue preference.
2695 fn check_expr_with_expectation<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2696 expr: &'tcx hir::Expr,
2697 expected: Expectation<'tcx>) {
2698 check_expr_with_expectation_and_lvalue_pref(fcx, expr, expected, NoPreference)
// Convenience wrapper: checks `expr` with no expectation at all.
2701 fn check_expr<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, expr: &'tcx hir::Expr) {
2702 check_expr_with_expectation(fcx, expr, NoExpectation)
// Convenience wrapper: checks `expr` with no expectation but an explicit
// lvalue preference (used for receivers / assignment LHS).
2705 fn check_expr_with_lvalue_pref<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, expr: &'tcx hir::Expr,
2706 lvalue_pref: LvaluePreference) {
2707 check_expr_with_expectation_and_lvalue_pref(fcx, expr, NoExpectation, lvalue_pref)
2710 // determine the `self` type, using fresh variables for all variables
2711 // declared on the impl declaration e.g., `impl<A,B> for Vec<(A,B)>`
2712 // would return ($0, $1) where $0 and $1 are freshly instantiated type
2714 pub fn impl_self_ty<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2715 span: Span, // (potential) receiver for this impl
2717 -> TypeAndSubsts<'tcx> {
2718 let tcx = fcx.tcx();
2720 let ity = tcx.lookup_item_type(did);
// Pull the impl's type/region parameters and its raw (uninstantiated)
// self type from the type scheme.
2721 let (tps, rps, raw_ty) =
2722 (ity.generics.types.get_slice(subst::TypeSpace),
2723 ity.generics.regions.get_slice(subst::TypeSpace),
2726 debug!("impl_self_ty: tps={:?} rps={:?} raw_ty={:?}", tps, rps, raw_ty);
// Build substitutions: fresh region variables first, then fresh type
// variables pushed into `substs` in-place.
2728 let rps = fcx.inh.infcx.region_vars_for_defs(span, rps);
2729 let mut substs = subst::Substs::new(
2730 VecPerParamSpace::empty(),
2731 VecPerParamSpace::new(rps, Vec::new(), Vec::new()));
2732 fcx.inh.infcx.type_vars_for_defs(span, ParamSpace::TypeSpace, &mut substs, tps);
2733 let substd_ty = fcx.instantiate_type_scheme(span, &substs, &raw_ty);
2735 TypeAndSubsts { substs: substs, ty: substd_ty }
2738 /// Controls whether the arguments are tupled. This is used for the call
2741 /// Tupling means that all call-side arguments are packed into a tuple and
2742 /// passed as a single parameter. For example, if tupling is enabled, this
2745 /// fn f(x: (isize, isize))
2747 /// Can be called as:
2754 #[derive(Clone, Eq, PartialEq)]
// Variants (elided in this excerpt) are referenced above as
// `DontTupleArguments` and `TupleArguments`.
2755 enum TupleArgumentsFlag {
2760 /// Unifies the return type with the expected type early, for more coercions
2761 /// and forward type information on the argument expressions.
/// Returns one expected type per formal argument (with any substitutions
/// learned from the return-type unification applied), or an empty vec when
/// there is no usable expectation or unification fails.
2762 fn expected_types_for_fn_args<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2764 expected_ret: Expectation<'tcx>,
2765 formal_ret: ty::FnOutput<'tcx>,
2766 formal_args: &[Ty<'tcx>])
2768 let expected_args = expected_ret.only_has_type(fcx).and_then(|ret_ty| {
2769 if let ty::FnConverging(formal_ret_ty) = formal_ret {
// `commit_regions_if_ok` rolls region constraints back on failure, so a
// failed speculative unification leaves no trace.
2770 fcx.infcx().commit_regions_if_ok(|| {
2771 // Attempt to apply a subtyping relationship between the formal
2772 // return type (likely containing type variables if the function
2773 // is polymorphic) and the expected return type.
2774 // No argument expectations are produced if unification fails.
2775 let origin = TypeOrigin::Misc(call_span);
2776 let ures = fcx.infcx().sub_types(false, origin, formal_ret_ty, ret_ty);
2777 // FIXME(#15760) can't use try! here, FromError doesn't default
2778 // to identity so the resulting type is not constrained.
2779 if let Err(e) = ures {
2783 // Record all the argument types, with the substitutions
2784 // produced from the above subtyping unification.
2785 Ok(formal_args.iter().map(|ty| {
2786 fcx.infcx().resolve_type_vars_if_possible(ty)
2792 }).unwrap_or(vec![]);
2793 debug!("expected_types_for_fn_args(formal={:?} -> {:?}, expected={:?} -> {:?})",
2794 formal_args, formal_ret,
2795 expected_args, expected_ret);
2800 /// If an expression has any sub-expressions that result in a type error,
2801 /// inspecting that expression's type with `ty.references_error()` will return
2802 /// true. Likewise, if an expression is known to diverge, inspecting its
2803 /// type with `ty::type_is_bot` will return true (n.b.: since Rust is
2804 /// strict, _|_ can appear in the type of an expression that does not,
2805 /// itself, diverge: for example, fn() -> _|_.)
2806 /// Note that inspecting a type's structure *directly* may expose the fact
2807 /// that there are actually multiple representations for `TyError`, so avoid
2808 /// that when err needs to be handled differently.
2809 fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2810 expr: &'tcx hir::Expr,
2811 expected: Expectation<'tcx>,
2812 lvalue_pref: LvaluePreference) {
2813 debug!(">> typechecking: expr={:?} expected={:?}",
2816 // Checks a method call.
// `args[0]` is the receiver; explicit type parameters from the call's
// turbofish (if any) are lowered via `fcx.to_ty`. On lookup failure an
// error is reported unless the method name is the parser's `invalid`
// placeholder (error already emitted upstream).
2817 fn check_method_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2818 expr: &'tcx hir::Expr,
2819 method_name: Spanned<ast::Name>,
2820 args: &'tcx [P<hir::Expr>],
2822 expected: Expectation<'tcx>,
2823 lvalue_pref: LvaluePreference) {
2824 let rcvr = &args[0];
2825 check_expr_with_lvalue_pref(fcx, &rcvr, lvalue_pref);
2827 // no need to check for bot/err -- callee does that
2828 let expr_t = structurally_resolved_type(fcx,
2830 fcx.expr_ty(&rcvr));
2832 let tps = tps.iter().map(|ast_ty| fcx.to_ty(&ast_ty)).collect::<Vec<_>>();
2833 let fn_ty = match method::lookup(fcx,
// Success: remember the resolved method for this call site.
2841 let method_ty = method.ty;
2842 let method_call = MethodCall::expr(expr.id);
2843 fcx.inh.tables.borrow_mut().method_map.insert(method_call, method);
2847 if method_name.node != special_idents::invalid.name {
2848 method::report_error(fcx, method_name.span, expr_t,
2849 method_name.node, Some(rcvr), error);
2851 fcx.write_error(expr.id);
2856 // Call the generic checker.
2857 let ret_ty = check_method_argument_types(fcx,
2865 write_call(fcx, expr, ret_ty);
2868 // A generic function for checking the then and else in an if
// The condition must be `bool`. With an `else`, the branch types are
// unified (preferably by coercion LUB on the `then` tail expression);
// without one, the `then` block must be `()`.
2870 fn check_then_else<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
2871 cond_expr: &'tcx hir::Expr,
2872 then_blk: &'tcx hir::Block,
2873 opt_else_expr: Option<&'tcx hir::Expr>,
2876 expected: Expectation<'tcx>) {
2877 check_expr_has_type(fcx, cond_expr, fcx.tcx().types.bool);
2879 let expected = expected.adjust_for_branches(fcx);
2880 check_block_with_expected(fcx, then_blk, expected);
2881 let then_ty = fcx.node_ty(then_blk.id);
2883 let unit = fcx.tcx().mk_nil();
2884 let (origin, expected, found, result) =
2885 if let Some(else_expr) = opt_else_expr {
2886 check_expr_with_expectation(fcx, else_expr, expected);
2887 let else_ty = fcx.expr_ty(else_expr);
2888 let origin = TypeOrigin::IfExpression(sp);
2890 // Only try to coerce-unify if we have a then expression
2891 // to assign coercions to, otherwise it's () or diverging.
2892 let result = if let Some(ref then) = then_blk.expr {
2893 let res = coercion::try_find_lub(fcx, origin, || Some(&**then),
2894 then_ty, else_expr);
2896 // In case we did perform an adjustment, we have to update
2897 // the type of the block, because old trans still uses it.
2898 let adj = fcx.inh.tables.borrow().adjustments.get(&then.id).cloned();
2899 if res.is_ok() && adj.is_some() {
2900 fcx.write_ty(then_blk.id, fcx.adjust_expr_ty(then, adj.as_ref()));
// No tail expression: fall back to a plain (non-coercing) LUB.
2905 fcx.infcx().commit_if_ok(|_| {
2906 let trace = TypeTrace::types(origin, true, then_ty, else_ty);
2907 fcx.infcx().lub(true, trace).relate(&then_ty, &else_ty)
2910 (origin, then_ty, else_ty, result)
// No `else`: the `if` evaluates to `()`, so the then block must too.
2912 let origin = TypeOrigin::IfExpressionWithNoElse(sp);
2913 (origin, unit, then_ty,
2914 fcx.infcx().eq_types(true, origin, unit, then_ty).map(|_| unit))
2917 let if_ty = match result {
// Suppress the branch-mismatch diagnostic when the condition itself was
// already erroneous.
2919 if fcx.expr_ty(cond_expr).references_error() {
2926 fcx.infcx().report_mismatched_types(origin, expected, found, e);
2931 fcx.write_ty(id, if_ty);
2934 // Check field access expressions
// Type-checks `base.field`: autoderefs the base looking for a struct with a
// field of that name; on failure, distinguishes "that's a method, add `()`"
// from "no such field" (with a did-you-mean suggestion).
2935 fn check_field<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
2936 expr: &'tcx hir::Expr,
2937 lvalue_pref: LvaluePreference,
2938 base: &'tcx hir::Expr,
2939 field: &Spanned<ast::Name>) {
2940 check_expr_with_lvalue_pref(fcx, base, lvalue_pref);
2941 let expr_t = structurally_resolved_type(fcx, expr.span,
2943 // FIXME(eddyb) #12808 Integrate privacy into this auto-deref loop.
2944 let (_, autoderefs, field_ty) = autoderef(fcx,
2948 UnresolvedTypeAction::Error,
2952 ty::TyStruct(base_def, substs) => {
2953 debug!("struct named {:?}", base_t);
2954 base_def.struct_variant()
2955 .find_field_named(field.node)
2956 .map(|f| fcx.field_ty(expr.span, f, substs))
// Found: record the field's type and the autoderef adjustment.
2963 fcx.write_ty(expr.id, field_ty);
2964 fcx.write_autoderef_adjustment(base.id, autoderefs);
// `invalid` is the parser's placeholder name -- error already reported.
2970 if field.node == special_idents::invalid.name {
2971 fcx.write_error(expr.id);
// A method of that name exists: suggest calling it instead.
2975 if method::exists(fcx, field.span, field.node, expr_t, expr.id) {
2976 fcx.type_error_struct(field.span,
2978 format!("attempted to take value of method `{}` on type \
2979 `{}`", field.node, actual)
2982 .fileline_help(field.span,
2983 "maybe a `()` to call it is missing? \
2984 If not, try an anonymous function")
2987 let mut err = fcx.type_error_struct(
2990 format!("attempted access of field `{}` on \
2991 type `{}`, but no field with that \
2997 if let ty::TyStruct(def, _) = expr_t.sty {
2998 suggest_field_names(&mut err, def.struct_variant(), field, vec![]);
3003 fcx.write_error(expr.id);
3006 // displays hints about the closest matches in field names
// Attaches a "did you mean `...`?" note to `err`, fuzzy-matching `field`
// against the variant's fields; `skip` lists names to exclude (e.g. fields
// the user already wrote in a struct literal).
3007 fn suggest_field_names<'tcx>(err: &mut DiagnosticBuilder,
3008 variant: ty::VariantDef<'tcx>,
3009 field: &Spanned<ast::Name>,
3010 skip : Vec<InternedString>) {
3011 let name = field.node.as_str();
3012 let names = variant.fields
3014 .filter_map(|ref field| {
3015 // ignore already set fields and private fields from non-local crates
3016 if skip.iter().any(|x| *x == field.name.as_str()) ||
3017 (variant.did.krate != LOCAL_CRATE && field.vis != Visibility::Public) {
3024 // only find fits with at least one matching letter
3025 if let Some(name) = find_best_match_for_name(names, &name, Some(name.len())) {
3026 err.span_help(field.span,
3027 &format!("did you mean `{}`?", name));
3031 // Check tuple index expressions
// Type-checks `base.N`: autoderefs the base looking for either a tuple
// struct or a plain tuple; `tuple_like` distinguishes "index out of bounds"
// from "not a tuple at all" in the error message.
3032 fn check_tup_field<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
3033 expr: &'tcx hir::Expr,
3034 lvalue_pref: LvaluePreference,
3035 base: &'tcx hir::Expr,
3036 idx: codemap::Spanned<usize>) {
3037 check_expr_with_lvalue_pref(fcx, base, lvalue_pref);
3038 let expr_t = structurally_resolved_type(fcx, expr.span,
3040 let mut tuple_like = false;
3041 // FIXME(eddyb) #12808 Integrate privacy into this auto-deref loop.
3042 let (_, autoderefs, field_ty) = autoderef(fcx,
3046 UnresolvedTypeAction::Error,
3050 ty::TyStruct(base_def, substs) => {
3051 tuple_like = base_def.struct_variant().is_tuple_struct();
3053 debug!("tuple struct named {:?}", base_t);
3054 base_def.struct_variant()
3057 .map(|f| fcx.field_ty(expr.span, f, substs))
3062 ty::TyTuple(ref v) => {
// Bounds-checked positional access into the tuple's element list.
3064 if idx.node < v.len() { Some(v[idx.node]) } else { None }
// Found: record the element type and the autoderef adjustment.
3071 fcx.write_ty(expr.id, field_ty);
3072 fcx.write_autoderef_adjustment(base.id, autoderefs);
3077 fcx.type_error_message(
3081 format!("attempted out-of-bounds tuple index `{}` on \
3086 format!("attempted tuple index `{}` on type `{}`, but the \
3087 type was not a tuple or tuple struct",
3094 fcx.write_error(expr.id);
// Reports a struct-literal field that does not exist on the target variant,
// phrasing the message for enum struct-variants vs. plain structs, and adds
// a did-you-mean suggestion excluding the fields the user already wrote.
3097 fn report_unknown_field<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
3099 variant: ty::VariantDef<'tcx>,
3101 skip_fields: &[hir::Field]) {
3102 let mut err = fcx.type_error_struct(
3104 |actual| if let ty::TyEnum(..) = ty.sty {
3105 format!("struct variant `{}::{}` has no field named `{}`",
3106 actual, variant.name.as_str(), field.name.node)
3108 format!("structure `{}` has no field named `{}`",
3109 actual, field.name.node)
3113 // prevent all specified fields from being suggested
3114 let skip_fields = skip_fields.iter().map(|ref x| x.name.node.as_str());
3115 suggest_field_names(&mut err, variant, &field.name, skip_fields.collect());
// Type-checks the field initializers of a struct literal against `variant`.
// Duplicate fields get E0062, unknown fields go through
// `report_unknown_field`, and -- when `check_completeness` (i.e. there is
// no `..base` expression) -- any fields left uninitialized get E0063.
3119 fn check_expr_struct_fields<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
3122 variant: ty::VariantDef<'tcx>,
3123 ast_fields: &'tcx [hir::Field],
3124 check_completeness: bool) {
3125 let tcx = fcx.ccx.tcx;
3126 let substs = match adt_ty.sty {
3127 ty::TyStruct(_, substs) | ty::TyEnum(_, substs) => substs,
3128 _ => tcx.sess.span_bug(span, "non-ADT passed to check_expr_struct_fields")
// Track which declared fields have not yet been initialized; entries are
// removed as each written field is seen.
3131 let mut remaining_fields = FnvHashMap();
3132 for field in &variant.fields {
3133 remaining_fields.insert(field.name, field);
3136 let mut error_happened = false;
3138 // Typecheck each field.
3139 for field in ast_fields {
3140 let expected_field_type;
3142 if let Some(v_field) = remaining_fields.remove(&field.name.node) {
3143 expected_field_type = fcx.field_ty(field.span, v_field, substs);
3145 error_happened = true;
3146 expected_field_type = tcx.types.err;
// Present on the variant but already consumed above => duplicate.
3147 if let Some(_) = variant.find_field_named(field.name.node) {
3148 span_err!(fcx.tcx().sess, field.name.span, E0062,
3149 "field `{}` specified more than once",
3152 report_unknown_field(fcx, adt_ty, variant, field, ast_fields);
3156 // Make sure to give a type to the field even if there's
3157 // an error, so we can continue typechecking
3158 check_expr_coercable_to_type(fcx, &field.expr, expected_field_type);
3161 // Make sure the programmer specified all the fields.
3162 if check_completeness &&
3164 !remaining_fields.is_empty()
3166 span_err!(tcx.sess, span, E0063,
3167 "missing field{} {} in initializer of `{}`",
3168 if remaining_fields.len() == 1 {""} else {"s"},
3169 remaining_fields.keys()
3170 .map(|n| format!("`{}`", n))
3171 .collect::<Vec<_>>()
// Recovery path for an erroneous struct literal: marks the expression as an
// error but still checks every field initializer (and the `..base`
// expression, if any) so all sub-expressions get types.
3178 fn check_struct_fields_on_error<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
3180 fields: &'tcx [hir::Field],
3181 base_expr: &'tcx Option<P<hir::Expr>>) {
3182 // Make sure to still write the types
3183 // otherwise we might ICE
3184 fcx.write_error(id);
3185 for field in fields {
3186 check_expr(fcx, &field.expr);
3189 Some(ref base) => check_expr(fcx, &base),
// Type-checks a struct literal `Path { fields, ..base }`: resolves `Path`
// to a variant (E0071 if it isn't struct-like), checks the fields, and for
// functional-record-update records the per-field types (or emits E0436 when
// the target is not a struct).
3194 fn check_expr_struct<'a, 'tcx>(fcx: &FnCtxt<'a,'tcx>,
3197 fields: &'tcx [hir::Field],
3198 base_expr: &'tcx Option<P<hir::Expr>>)
3200 let tcx = fcx.tcx();
3202 // Find the relevant variant
3203 let def = lookup_full_def(tcx, path.span, expr.id);
3204 if def == Def::Err {
3205 check_struct_fields_on_error(fcx, expr.id, fields, base_expr);
3208 let variant = match fcx.def_struct_variant(def, path.span) {
3209 Some((_, variant)) => variant,
3211 span_err!(fcx.tcx().sess, path.span, E0071,
3212 "`{}` does not name a structure",
3213 pprust::path_to_string(path));
3214 check_struct_fields_on_error(fcx, expr.id, fields, base_expr);
3219 let expr_ty = fcx.instantiate_type(def.def_id(), path);
3220 fcx.write_ty(expr.id, expr_ty);
// `base_expr.is_none()` => completeness must be checked (no `..base`).
3222 check_expr_struct_fields(fcx, expr_ty, expr.span, variant, fields,
3223 base_expr.is_none());
3224 if let &Some(ref base_expr) = base_expr {
3225 check_expr_has_type(fcx, base_expr, expr_ty);
3227 ty::TyStruct(adt, substs) => {
// Record each field's (normalized) type for the `..base` update so later
// phases know what is being copied from `base`.
3228 fcx.inh.tables.borrow_mut().fru_field_types.insert(
3230 adt.struct_variant().fields.iter().map(|f| {
3231 fcx.normalize_associated_types_in(
3232 expr.span, &f.ty(tcx, substs)
3238 span_err!(tcx.sess, base_expr.span, E0436,
3239 "functional record update syntax requires a struct");
3245 type ExprCheckerWithTy = fn(&FnCtxt, &hir::Expr, Ty);
3247 let tcx = fcx.ccx.tcx;
3250 hir::ExprBox(ref subexpr) => {
3251 let expected_inner = expected.to_option(fcx).map_or(NoExpectation, |ty| {
3253 ty::TyBox(ty) => Expectation::rvalue_hint(tcx, ty),
3257 check_expr_with_expectation(fcx, subexpr, expected_inner);
3258 let referent_ty = fcx.expr_ty(&subexpr);
3259 fcx.write_ty(id, tcx.mk_box(referent_ty));
3262 hir::ExprLit(ref lit) => {
3263 let typ = check_lit(fcx, &lit, expected);
3264 fcx.write_ty(id, typ);
3266 hir::ExprBinary(op, ref lhs, ref rhs) => {
3267 op::check_binop(fcx, expr, op, lhs, rhs);
3269 hir::ExprAssignOp(op, ref lhs, ref rhs) => {
3270 op::check_binop_assign(fcx, expr, op, lhs, rhs);
3272 hir::ExprUnary(unop, ref oprnd) => {
3273 let expected_inner = match unop {
3274 hir::UnNot | hir::UnNeg => {
3281 let lvalue_pref = match unop {
3282 hir::UnDeref => lvalue_pref,
3285 check_expr_with_expectation_and_lvalue_pref(
3286 fcx, &oprnd, expected_inner, lvalue_pref);
3287 let mut oprnd_t = fcx.expr_ty(&oprnd);
3289 if !oprnd_t.references_error() {
3292 oprnd_t = structurally_resolved_type(fcx, expr.span, oprnd_t);
3294 if let Some(mt) = oprnd_t.builtin_deref(true, NoPreference) {
3296 } else if let Some(method) = try_overloaded_deref(
3297 fcx, expr.span, Some(&oprnd), oprnd_t, lvalue_pref) {
3298 oprnd_t = make_overloaded_lvalue_return_type(tcx, method).ty;
3299 fcx.inh.tables.borrow_mut().method_map.insert(MethodCall::expr(expr.id),
3302 fcx.type_error_message(expr.span, |actual| {
3303 format!("type `{}` cannot be \
3304 dereferenced", actual)
3306 oprnd_t = tcx.types.err;
3310 oprnd_t = structurally_resolved_type(fcx, oprnd.span,
3312 if !(oprnd_t.is_integral() || oprnd_t.sty == ty::TyBool) {
3313 oprnd_t = op::check_user_unop(fcx, "!", "not",
3314 tcx.lang_items.not_trait(),
3315 expr, &oprnd, oprnd_t, unop);
3319 oprnd_t = structurally_resolved_type(fcx, oprnd.span,
3321 if !(oprnd_t.is_integral() || oprnd_t.is_fp()) {
3322 oprnd_t = op::check_user_unop(fcx, "-", "neg",
3323 tcx.lang_items.neg_trait(),
3324 expr, &oprnd, oprnd_t, unop);
3329 fcx.write_ty(id, oprnd_t);
3331 hir::ExprAddrOf(mutbl, ref oprnd) => {
3332 let hint = expected.only_has_type(fcx).map_or(NoExpectation, |ty| {
3334 ty::TyRef(_, ref mt) | ty::TyRawPtr(ref mt) => {
3335 if fcx.tcx().expr_is_lval(&oprnd) {
3336 // Lvalues may legitimately have unsized types.
3337 // For example, dereferences of a fat pointer and
3338 // the last field of a struct can be unsized.
3339 ExpectHasType(mt.ty)
3341 Expectation::rvalue_hint(tcx, mt.ty)
3347 let lvalue_pref = LvaluePreference::from_mutbl(mutbl);
3348 check_expr_with_expectation_and_lvalue_pref(fcx,
3353 let tm = ty::TypeAndMut { ty: fcx.expr_ty(&oprnd), mutbl: mutbl };
3354 let oprnd_t = if tm.ty.references_error() {
3357 // Note: at this point, we cannot say what the best lifetime
3358 // is to use for resulting pointer. We want to use the
3359 // shortest lifetime possible so as to avoid spurious borrowck
3360 // errors. Moreover, the longest lifetime will depend on the
3361 // precise details of the value whose address is being taken
3362 // (and how long it is valid), which we don't know yet until type
3363 // inference is complete.
3365 // Therefore, here we simply generate a region variable. The
3366 // region inferencer will then select the ultimate value.
3367 // Finally, borrowck is charged with guaranteeing that the
3368 // value whose address was taken can actually be made to live
3369 // as long as it needs to live.
3370 let region = fcx.infcx().next_region_var(infer::AddrOfRegion(expr.span));
3371 tcx.mk_ref(tcx.mk_region(region), tm)
3373 fcx.write_ty(id, oprnd_t);
3375 hir::ExprPath(ref maybe_qself, ref path) => {
3376 let opt_self_ty = maybe_qself.as_ref().map(|qself| {
3377 fcx.to_ty(&qself.ty)
3380 let path_res = if let Some(&d) = tcx.def_map.borrow().get(&id) {
3382 } else if let Some(hir::QSelf { position: 0, .. }) = *maybe_qself {
3383 // Create some fake resolution that can't possibly be a type.
3384 def::PathResolution {
3385 base_def: Def::Mod(tcx.map.local_def_id(ast::CRATE_NODE_ID)),
3386 depth: path.segments.len()
3389 tcx.sess.span_bug(expr.span,
3390 &format!("unbound path {:?}", expr))
3393 if let Some((opt_ty, segments, def)) =
3394 resolve_ty_and_def_ufcs(fcx, path_res, opt_self_ty, path,
3395 expr.span, expr.id) {
3396 if def != Def::Err {
3397 let (scheme, predicates) = type_scheme_and_predicates_for_def(fcx,
3400 instantiate_path(fcx,
3409 fcx.write_ty(id, fcx.tcx().types.err);
3413 // We always require that the type provided as the value for
3414 // a type parameter outlives the moment of instantiation.
3415 fcx.opt_node_ty_substs(expr.id, |item_substs| {
3416 fcx.add_wf_bounds(&item_substs.substs, expr);
3419 hir::ExprInlineAsm(_, ref outputs, ref inputs) => {
3420 for output in outputs {
3421 check_expr(fcx, output);
3423 for input in inputs {
3424 check_expr(fcx, input);
3428 hir::ExprBreak(_) => { fcx.write_ty(id, fcx.infcx().next_diverging_ty_var()); }
3429 hir::ExprAgain(_) => { fcx.write_ty(id, fcx.infcx().next_diverging_ty_var()); }
3430 hir::ExprRet(ref expr_opt) => {
3432 ty::FnConverging(result_type) => {
3435 if let Err(_) = fcx.mk_eqty(false, TypeOrigin::Misc(expr.span),
3436 result_type, fcx.tcx().mk_nil()) {
3437 span_err!(tcx.sess, expr.span, E0069,
3438 "`return;` in a function whose return type is \
3442 check_expr_coercable_to_type(fcx, &e, result_type);
3446 ty::FnDiverging => {
3447 if let Some(ref e) = *expr_opt {
3448 check_expr(fcx, &e);
3450 span_err!(tcx.sess, expr.span, E0166,
3451 "`return` in a function declared as diverging");
3454 fcx.write_ty(id, fcx.infcx().next_diverging_ty_var());
3456 hir::ExprAssign(ref lhs, ref rhs) => {
3457 check_expr_with_lvalue_pref(fcx, &lhs, PreferMutLvalue);
3459 let tcx = fcx.tcx();
3460 if !tcx.expr_is_lval(&lhs) {
3461 span_err!(tcx.sess, expr.span, E0070,
3462 "invalid left-hand side expression");
3465 let lhs_ty = fcx.expr_ty(&lhs);
3466 check_expr_coercable_to_type(fcx, &rhs, lhs_ty);
3467 let rhs_ty = fcx.expr_ty(&rhs);
3469 fcx.require_expr_have_sized_type(&lhs, traits::AssignmentLhsSized);
3471 if lhs_ty.references_error() || rhs_ty.references_error() {
3472 fcx.write_error(id);
3477 hir::ExprIf(ref cond, ref then_blk, ref opt_else_expr) => {
3478 check_then_else(fcx, &cond, &then_blk, opt_else_expr.as_ref().map(|e| &**e),
3479 id, expr.span, expected);
3481 hir::ExprWhile(ref cond, ref body, _) => {
3482 check_expr_has_type(fcx, &cond, tcx.types.bool);
3483 check_block_no_value(fcx, &body);
3484 let cond_ty = fcx.expr_ty(&cond);
3485 let body_ty = fcx.node_ty(body.id);
3486 if cond_ty.references_error() || body_ty.references_error() {
3487 fcx.write_error(id);
3493 hir::ExprLoop(ref body, _) => {
3494 check_block_no_value(fcx, &body);
3495 if !may_break(tcx, expr.id, &body) {
3496 fcx.write_ty(id, fcx.infcx().next_diverging_ty_var());
3501 hir::ExprMatch(ref discrim, ref arms, match_src) => {
3502 _match::check_match(fcx, expr, &discrim, arms, expected, match_src);
3504 hir::ExprClosure(capture, ref decl, ref body) => {
3505 closure::check_expr_closure(fcx, expr, capture, &decl, &body, expected);
3507 hir::ExprBlock(ref b) => {
3508 check_block_with_expected(fcx, &b, expected);
3509 fcx.write_ty(id, fcx.node_ty(b.id));
3511 hir::ExprCall(ref callee, ref args) => {
3512 callee::check_call(fcx, expr, &callee, &args[..], expected);
3514 // we must check that return type of called functions is WF:
3515 let ret_ty = fcx.expr_ty(expr);
3516 fcx.register_wf_obligation(ret_ty, expr.span, traits::MiscObligation);
3518 hir::ExprMethodCall(name, ref tps, ref args) => {
3519 check_method_call(fcx, expr, name, &args[..], &tps[..], expected, lvalue_pref);
3520 let arg_tys = args.iter().map(|a| fcx.expr_ty(&a));
3521 let args_err = arg_tys.fold(false, |rest_err, a| rest_err || a.references_error());
3523 fcx.write_error(id);
3526 hir::ExprCast(ref e, ref t) => {
3527 if let hir::TyFixedLengthVec(_, ref count_expr) = t.node {
3528 check_expr_with_hint(fcx, &count_expr, tcx.types.usize);
3531 // Find the type of `e`. Supply hints based on the type we are casting to,
3533 let t_cast = fcx.to_ty(t);
3534 let t_cast = structurally_resolved_type(fcx, expr.span, t_cast);
3535 check_expr_with_expectation(fcx, e, ExpectCastableToType(t_cast));
3536 let t_expr = fcx.expr_ty(e);
3537 let t_cast = fcx.infcx().resolve_type_vars_if_possible(&t_cast);
3539 // Eagerly check for some obvious errors.
3540 if t_expr.references_error() || t_cast.references_error() {
3541 fcx.write_error(id);
3542 } else if !fcx.type_is_known_to_be_sized(t_cast, expr.span) {
3543 report_cast_to_unsized_type(fcx, expr.span, t.span, e.span, t_cast, t_expr, id);
3545 // Write a type for the whole expression, assuming everything is going
3547 fcx.write_ty(id, t_cast);
3549 // Defer other checks until we're done type checking.
3550 let mut deferred_cast_checks = fcx.inh.deferred_cast_checks.borrow_mut();
3551 let cast_check = cast::CastCheck::new(e, t_expr, t_cast, expr.span);
3552 deferred_cast_checks.push(cast_check);
3555 hir::ExprType(ref e, ref t) => {
3556 let typ = fcx.to_ty(&t);
3557 check_expr_eq_type(fcx, &e, typ);
3558 fcx.write_ty(id, typ);
3560 hir::ExprVec(ref args) => {
3561 let uty = expected.to_option(fcx).and_then(|uty| {
3563 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3568 let mut unified = fcx.infcx().next_ty_var();
3569 let coerce_to = uty.unwrap_or(unified);
3571 for (i, e) in args.iter().enumerate() {
3572 check_expr_with_hint(fcx, e, coerce_to);
3573 let e_ty = fcx.expr_ty(e);
3574 let origin = TypeOrigin::Misc(e.span);
3576 // Special-case the first element, as it has no "previous expressions".
3577 let result = if i == 0 {
3578 coercion::try(fcx, e, coerce_to)
3580 let prev_elems = || args[..i].iter().map(|e| &**e);
3581 coercion::try_find_lub(fcx, origin, prev_elems, unified, e)
3585 Ok(ty) => unified = ty,
3587 fcx.infcx().report_mismatched_types(origin, unified, e_ty, e);
3591 fcx.write_ty(id, tcx.mk_array(unified, args.len()));
3593 hir::ExprRepeat(ref element, ref count_expr) => {
3594 check_expr_has_type(fcx, &count_expr, tcx.types.usize);
3595 let count = fcx.tcx().eval_repeat_count(&count_expr);
3597 let uty = match expected {
3598 ExpectHasType(uty) => {
3600 ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty),
3607 let (element_ty, t) = match uty {
3609 check_expr_coercable_to_type(fcx, &element, uty);
3613 let t: Ty = fcx.infcx().next_ty_var();
3614 check_expr_has_type(fcx, &element, t);
3615 (fcx.expr_ty(&element), t)
3620 // For [foo, ..n] where n > 1, `foo` must have
3622 fcx.require_type_meets(
3629 if element_ty.references_error() {
3630 fcx.write_error(id);
3632 let t = tcx.mk_array(t, count);
3633 fcx.write_ty(id, t);
3636 hir::ExprTup(ref elts) => {
3637 let flds = expected.only_has_type(fcx).and_then(|ty| {
3639 ty::TyTuple(ref flds) => Some(&flds[..]),
3643 let mut err_field = false;
3645 let elt_ts = elts.iter().enumerate().map(|(i, e)| {
3646 let t = match flds {
3647 Some(ref fs) if i < fs.len() => {
3649 check_expr_coercable_to_type(fcx, &e, ety);
3653 check_expr_with_expectation(fcx, &e, NoExpectation);
3657 err_field = err_field || t.references_error();
3661 fcx.write_error(id);
3663 let typ = tcx.mk_tup(elt_ts);
3664 fcx.write_ty(id, typ);
3667 hir::ExprStruct(ref path, ref fields, ref base_expr) => {
3668 check_expr_struct(fcx, expr, path, fields, base_expr);
3670 fcx.require_expr_have_sized_type(expr, traits::StructInitializerSized);
3672 hir::ExprField(ref base, ref field) => {
3673 check_field(fcx, expr, lvalue_pref, &base, field);
3675 hir::ExprTupField(ref base, idx) => {
3676 check_tup_field(fcx, expr, lvalue_pref, &base, idx);
3678 hir::ExprIndex(ref base, ref idx) => {
3679 check_expr_with_lvalue_pref(fcx, &base, lvalue_pref);
3680 check_expr(fcx, &idx);
3682 let base_t = fcx.expr_ty(&base);
3683 let idx_t = fcx.expr_ty(&idx);
3685 if base_t.references_error() {
3686 fcx.write_ty(id, base_t);
3687 } else if idx_t.references_error() {
3688 fcx.write_ty(id, idx_t);
3690 let base_t = structurally_resolved_type(fcx, expr.span, base_t);
3691 match lookup_indexing(fcx, expr, base, base_t, idx_t, lvalue_pref) {
3692 Some((index_ty, element_ty)) => {
3693 let idx_expr_ty = fcx.expr_ty(idx);
3694 demand::eqtype(fcx, expr.span, index_ty, idx_expr_ty);
3695 fcx.write_ty(id, element_ty);
3698 check_expr_has_type(fcx, &idx, fcx.tcx().types.err);
3699 fcx.type_error_message(
3702 format!("cannot index a value of type `{}`",
3707 fcx.write_ty(id, fcx.tcx().types.err);
3714 debug!("type of expr({}) {} is...", expr.id,
3715 pprust::expr_to_string(expr));
3716 debug!("... {:?}, expected is {:?}",
3721 pub fn resolve_ty_and_def_ufcs<'a, 'b, 'tcx>(fcx: &FnCtxt<'b, 'tcx>,
3722 path_res: def::PathResolution,
3723 opt_self_ty: Option<Ty<'tcx>>,
3724 path: &'a hir::Path,
3726 node_id: ast::NodeId)
3727 -> Option<(Option<Ty<'tcx>>,
3728 &'a [hir::PathSegment],
3732 // If fully resolved already, we don't have to do anything.
3733 if path_res.depth == 0 {
3734 Some((opt_self_ty, &path.segments, path_res.base_def))
3736 let mut def = path_res.base_def;
3737 let ty_segments = path.segments.split_last().unwrap().1;
3738 let base_ty_end = path.segments.len() - path_res.depth;
3739 let ty = astconv::finish_resolving_def_to_ty(fcx, fcx, span,
3740 PathParamMode::Optional,
3743 &ty_segments[..base_ty_end],
3744 &ty_segments[base_ty_end..]);
3745 let item_segment = path.segments.last().unwrap();
3746 let item_name = item_segment.identifier.name;
3747 match method::resolve_ufcs(fcx, span, item_name, ty, node_id) {
3749 // Write back the new resolution.
3750 fcx.ccx.tcx.def_map.borrow_mut()
3751 .insert(node_id, def::PathResolution {
3755 Some((Some(ty), slice::ref_slice(item_segment), def))
3758 if item_name != special_idents::invalid.name {
3759 method::report_error(fcx, span, ty, item_name, None, error);
3761 fcx.write_error(node_id);
3768 impl<'tcx> Expectation<'tcx> {
3769 /// Provide an expectation for an rvalue expression given an *optional*
3770 /// hint, which is not required for type safety (the resulting type might
3771 /// be checked higher up, as is the case with `&expr` and `box expr`), but
3772 /// is useful in determining the concrete type.
3774 /// The primary use case is where the expected type is a fat pointer,
3775 /// like `&[isize]`. For example, consider the following statement:
3777 /// let x: &[isize] = &[1, 2, 3];
3779 /// In this case, the expected type for the `&[1, 2, 3]` expression is
3780 /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
3781 /// expectation `ExpectHasType([isize])`, that would be too strong --
3782 /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
3783 /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
3784 /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
3785 /// which still is useful, because it informs integer literals and the like.
3786 /// See the test case `test/run-pass/coerce-expect-unsized.rs` and #20169
3787 /// for examples of where this comes up,.
3788 fn rvalue_hint(tcx: &TyCtxt<'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> {
3789 match tcx.struct_tail(ty).sty {
3790 ty::TySlice(_) | ty::TyStr | ty::TyTrait(..) => {
3791 ExpectRvalueLikeUnsized(ty)
3793 _ => ExpectHasType(ty)
3797 // Resolves `expected` by a single level if it is a variable. If
3798 // there is no expected type or resolution is not possible (e.g.,
3799 // no constraints yet present), just returns `None`.
3800 fn resolve<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> {
3805 ExpectCastableToType(t) => {
3806 ExpectCastableToType(
3807 fcx.infcx().resolve_type_vars_if_possible(&t))
3809 ExpectHasType(t) => {
3811 fcx.infcx().resolve_type_vars_if_possible(&t))
3813 ExpectRvalueLikeUnsized(t) => {
3814 ExpectRvalueLikeUnsized(
3815 fcx.infcx().resolve_type_vars_if_possible(&t))
3820 fn to_option<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> {
3821 match self.resolve(fcx) {
3822 NoExpectation => None,
3823 ExpectCastableToType(ty) |
3825 ExpectRvalueLikeUnsized(ty) => Some(ty),
3829 fn only_has_type<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> {
3830 match self.resolve(fcx) {
3831 ExpectHasType(ty) => Some(ty),
3837 pub fn check_decl_initializer<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
3838 local: &'tcx hir::Local,
3839 init: &'tcx hir::Expr)
3841 let ref_bindings = fcx.tcx().pat_contains_ref_binding(&local.pat);
3843 let local_ty = fcx.local_ty(init.span, local.id);
3844 if let Some(m) = ref_bindings {
3845 // Somewhat subtle: if we have a `ref` binding in the pattern,
3846 // we want to avoid introducing coercions for the RHS. This is
3847 // both because it helps preserve sanity and, in the case of
3848 // ref mut, for soundness (issue #23116). In particular, in
3849 // the latter case, we need to be clear that the type of the
3850 // referent for the reference that results is *equal to* the
3851 // type of the lvalue it is referencing, and not some
3852 // supertype thereof.
3853 check_expr_with_lvalue_pref(fcx, init, LvaluePreference::from_mutbl(m));
3854 let init_ty = fcx.expr_ty(init);
3855 demand::eqtype(fcx, init.span, init_ty, local_ty);
3857 check_expr_coercable_to_type(fcx, init, local_ty)
3861 pub fn check_decl_local<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, local: &'tcx hir::Local) {
3862 let tcx = fcx.ccx.tcx;
3864 let t = fcx.local_ty(local.span, local.id);
3865 fcx.write_ty(local.id, t);
3867 if let Some(ref init) = local.init {
3868 check_decl_initializer(fcx, local, &init);
3869 let init_ty = fcx.expr_ty(&init);
3870 if init_ty.references_error() {
3871 fcx.write_ty(local.id, init_ty);
3875 let pcx = pat_ctxt {
3877 map: pat_id_map(&tcx.def_map, &local.pat),
3879 _match::check_pat(&pcx, &local.pat, t);
3880 let pat_ty = fcx.node_ty(local.pat.id);
3881 if pat_ty.references_error() {
3882 fcx.write_ty(local.id, pat_ty);
3886 pub fn check_stmt<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, stmt: &'tcx hir::Stmt) {
3888 let mut saw_bot = false;
3889 let mut saw_err = false;
3891 hir::StmtDecl(ref decl, id) => {
3894 hir::DeclLocal(ref l) => {
3895 check_decl_local(fcx, &l);
3896 let l_t = fcx.node_ty(l.id);
3897 saw_bot = saw_bot || fcx.infcx().type_var_diverges(l_t);
3898 saw_err = saw_err || l_t.references_error();
3900 hir::DeclItem(_) => {/* ignore for now */ }
3903 hir::StmtExpr(ref expr, id) => {
3905 // Check with expected type of ()
3906 check_expr_has_type(fcx, &expr, fcx.tcx().mk_nil());
3907 let expr_ty = fcx.expr_ty(&expr);
3908 saw_bot = saw_bot || fcx.infcx().type_var_diverges(expr_ty);
3909 saw_err = saw_err || expr_ty.references_error();
3911 hir::StmtSemi(ref expr, id) => {
3913 check_expr(fcx, &expr);
3914 let expr_ty = fcx.expr_ty(&expr);
3915 saw_bot |= fcx.infcx().type_var_diverges(expr_ty);
3916 saw_err |= expr_ty.references_error();
3920 fcx.write_ty(node_id, fcx.infcx().next_diverging_ty_var());
3923 fcx.write_error(node_id);
3926 fcx.write_nil(node_id)
3930 pub fn check_block_no_value<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, blk: &'tcx hir::Block) {
3931 check_block_with_expected(fcx, blk, ExpectHasType(fcx.tcx().mk_nil()));
3932 let blkty = fcx.node_ty(blk.id);
3933 if blkty.references_error() {
3934 fcx.write_error(blk.id);
3936 let nilty = fcx.tcx().mk_nil();
3937 demand::suptype(fcx, blk.span, nilty, blkty);
3941 fn check_block_with_expected<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
3942 blk: &'tcx hir::Block,
3943 expected: Expectation<'tcx>) {
3945 let mut fcx_ps = fcx.ps.borrow_mut();
3946 let unsafety_state = fcx_ps.recurse(blk);
3947 replace(&mut *fcx_ps, unsafety_state)
3950 let mut warned = false;
3951 let mut any_diverges = false;
3952 let mut any_err = false;
3953 for s in &blk.stmts {
3955 let s_id = ::rustc_front::util::stmt_id(s);
3956 let s_ty = fcx.node_ty(s_id);
3957 if any_diverges && !warned && match s.node {
3958 hir::StmtDecl(ref decl, _) => {
3960 hir::DeclLocal(_) => true,
3964 hir::StmtExpr(_, _) | hir::StmtSemi(_, _) => true,
3969 .add_lint(lint::builtin::UNREACHABLE_CODE,
3972 "unreachable statement".to_string());
3975 any_diverges = any_diverges || fcx.infcx().type_var_diverges(s_ty);
3976 any_err = any_err || s_ty.references_error();
3979 None => if any_err {
3980 fcx.write_error(blk.id);
3981 } else if any_diverges {
3982 fcx.write_ty(blk.id, fcx.infcx().next_diverging_ty_var());
3984 fcx.write_nil(blk.id);
3987 if any_diverges && !warned {
3991 .add_lint(lint::builtin::UNREACHABLE_CODE,
3994 "unreachable expression".to_string());
3996 let ety = match expected {
3997 ExpectHasType(ety) => {
3998 check_expr_coercable_to_type(fcx, &e, ety);
4002 check_expr_with_expectation(fcx, &e, expected);
4008 fcx.write_error(blk.id);
4009 } else if any_diverges {
4010 fcx.write_ty(blk.id, fcx.infcx().next_diverging_ty_var());
4012 fcx.write_ty(blk.id, ety);
4017 *fcx.ps.borrow_mut() = prev;
4020 /// Checks a constant appearing in a type. At the moment this is just the
4021 /// length expression in a fixed-length vector, but someday it might be
4022 /// extended to type-level numeric literals.
4023 fn check_const_in_type<'a,'tcx>(ccx: &'a CrateCtxt<'a,'tcx>,
4024 expr: &'tcx hir::Expr,
4025 expected_type: Ty<'tcx>) {
4026 let tables = RefCell::new(ty::Tables::empty());
4027 let inh = static_inherited_fields(ccx, &tables);
4028 let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(expected_type), expr.id);
4029 check_const_with_ty(&fcx, expr.span, expr, expected_type);
4032 fn check_const<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
4036 let tables = RefCell::new(ty::Tables::empty());
4037 let inh = static_inherited_fields(ccx, &tables);
4038 let rty = ccx.tcx.node_id_to_type(id);
4039 let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(rty), e.id);
4040 let declty = fcx.ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(id)).ty;
4041 check_const_with_ty(&fcx, sp, e, declty);
4044 fn check_const_with_ty<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4048 // Gather locals in statics (because of block expressions).
4049 // This is technically unnecessary because locals in static items are forbidden,
4050 // but prevents type checking from blowing up before const checking can properly
4052 GatherLocalsVisitor { fcx: fcx }.visit_expr(e);
4054 check_expr_with_hint(fcx, e, declty);
4055 demand::coerce(fcx, e.span, declty, e);
4057 fcx.select_all_obligations_and_apply_defaults();
4058 upvar::closure_analyze_const(&fcx, e);
4059 fcx.select_obligations_where_possible();
4061 fcx.select_all_obligations_or_error();
4063 regionck::regionck_expr(fcx, e);
4064 writeback::resolve_type_vars_in_expr(fcx, e);
4067 /// Checks whether a type can be represented in memory. In particular, it
4068 /// identifies types that contain themselves without indirection through a
4069 /// pointer, which would mean their size is unbounded.
4070 pub fn check_representable(tcx: &TyCtxt,
4072 item_id: ast::NodeId,
4073 _designation: &str) -> bool {
4074 let rty = tcx.node_id_to_type(item_id);
4076 // Check that it is possible to represent this type. This call identifies
4077 // (1) types that contain themselves and (2) types that contain a different
4078 // recursive type. It is only necessary to throw an error on those that
4079 // contain themselves. For case 2, there must be an inner type that will be
4080 // caught by case 1.
4081 match rty.is_representable(tcx, sp) {
4082 Representability::SelfRecursive => {
4083 let item_def_id = tcx.map.local_def_id(item_id);
4084 traits::recursive_type_with_infinite_size_error(tcx, item_def_id).emit();
4087 Representability::Representable | Representability::ContainsRecursive => (),
4092 pub fn check_simd(tcx: &TyCtxt, sp: Span, id: ast::NodeId) {
4093 let t = tcx.node_id_to_type(id);
4095 ty::TyStruct(def, substs) => {
4096 let fields = &def.struct_variant().fields;
4097 if fields.is_empty() {
4098 span_err!(tcx.sess, sp, E0075, "SIMD vector cannot be empty");
4101 let e = fields[0].ty(tcx, substs);
4102 if !fields.iter().all(|f| f.ty(tcx, substs) == e) {
4103 span_err!(tcx.sess, sp, E0076, "SIMD vector should be homogeneous");
4107 ty::TyParam(_) => { /* struct<T>(T, T, T, T) is ok */ }
4108 _ if e.is_machine() => { /* struct(u8, u8, u8, u8) is ok */ }
4110 span_err!(tcx.sess, sp, E0077,
4111 "SIMD vector element type should be machine type");
4120 pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
4122 vs: &'tcx [hir::Variant],
4124 fn do_check<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
4125 vs: &'tcx [hir::Variant],
4127 hint: attr::ReprAttr) {
4128 #![allow(trivial_numeric_casts)]
4130 let rty = ccx.tcx.node_id_to_type(id);
4131 let mut disr_vals: Vec<ty::Disr> = Vec::new();
4133 let tables = RefCell::new(ty::Tables::empty());
4134 let inh = static_inherited_fields(ccx, &tables);
4135 let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(rty), id);
4137 let repr_type_ty = ccx.tcx.enum_repr_type(Some(&hint)).to_ty(&ccx.tcx);
4139 if let Some(ref e) = v.node.disr_expr {
4140 check_const_with_ty(&fcx, e.span, e, repr_type_ty);
4144 let def_id = ccx.tcx.map.local_def_id(id);
4146 let variants = &ccx.tcx.lookup_adt_def(def_id).variants;
4147 for (v, variant) in vs.iter().zip(variants.iter()) {
4148 let current_disr_val = variant.disr_val;
4150 // Check for duplicate discriminant values
4151 match disr_vals.iter().position(|&x| x == current_disr_val) {
4153 let mut err = struct_span_err!(ccx.tcx.sess, v.span, E0081,
4154 "discriminant value `{}` already exists", disr_vals[i]);
4155 let variant_i_node_id = ccx.tcx.map.as_local_node_id(variants[i].did).unwrap();
4156 span_note!(&mut err, ccx.tcx.map.span(variant_i_node_id),
4157 "conflicting discriminant here");
4162 disr_vals.push(current_disr_val);
4166 let def_id = ccx.tcx.map.local_def_id(id);
4167 let hint = *ccx.tcx.lookup_repr_hints(def_id).get(0).unwrap_or(&attr::ReprAny);
4169 if hint != attr::ReprAny && vs.is_empty() {
4170 span_err!(ccx.tcx.sess, sp, E0084,
4171 "unsupported representation for zero-variant enum");
4174 do_check(ccx, vs, id, hint);
4176 check_representable(ccx.tcx, sp, id, "enum");
4179 // Returns the type parameter count and the type for the given definition.
4180 fn type_scheme_and_predicates_for_def<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4183 -> (TypeScheme<'tcx>, GenericPredicates<'tcx>) {
4185 Def::Local(_, nid) | Def::Upvar(_, nid, _, _) => {
4186 let typ = fcx.local_ty(sp, nid);
4187 (ty::TypeScheme { generics: ty::Generics::empty(), ty: typ },
4188 ty::GenericPredicates::empty())
4190 Def::Fn(id) | Def::Method(id) |
4191 Def::Static(id, _) | Def::Variant(_, id) |
4192 Def::Struct(id) | Def::Const(id) | Def::AssociatedConst(id) => {
4193 (fcx.tcx().lookup_item_type(id), fcx.tcx().lookup_predicates(id))
4198 Def::AssociatedTy(..) |
4202 Def::ForeignMod(..) |
4206 fcx.ccx.tcx.sess.span_bug(sp, &format!("expected value, found {:?}", defn));
4211 // Instantiates the given path, which must refer to an item with the given
4212 // number of type parameters and type.
4213 pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4214 segments: &[hir::PathSegment],
4215 type_scheme: TypeScheme<'tcx>,
4216 type_predicates: &ty::GenericPredicates<'tcx>,
4217 opt_self_ty: Option<Ty<'tcx>>,
4220 node_id: ast::NodeId) {
4221 debug!("instantiate_path(path={:?}, def={:?}, node_id={}, type_scheme={:?})",
4227 // We need to extract the type parameters supplied by the user in
4228 // the path `path`. Due to the current setup, this is a bit of a
4229 // tricky-process; the problem is that resolve only tells us the
4230 // end-point of the path resolution, and not the intermediate steps.
4231 // Luckily, we can (at least for now) deduce the intermediate steps
4232 // just from the end-point.
4234 // There are basically four cases to consider:
4236 // 1. Reference to a *type*, such as a struct or enum:
4238 // mod a { struct Foo<T> { ... } }
4240 // Because we don't allow types to be declared within one
4241 // another, a path that leads to a type will always look like
4242 // `a::b::Foo<T>` where `a` and `b` are modules. This implies
4243 // that only the final segment can have type parameters, and
4244 // they are located in the TypeSpace.
4246 // *Note:* Generally speaking, references to types don't
4247 // actually pass through this function, but rather the
4248 // `ast_ty_to_ty` function in `astconv`. However, in the case
4249 // of struct patterns (and maybe literals) we do invoke
4250 // `instantiate_path` to get the general type of an instance of
4251 // a struct. (In these cases, there are actually no type
4252 // parameters permitted at present, but perhaps we will allow
4253 // them in the future.)
4255 // 1b. Reference to an enum variant or tuple-like struct:
4257 // struct foo<T>(...)
4258 // enum E<T> { foo(...) }
4260 // In these cases, the parameters are declared in the type
4263 // 2. Reference to a *fn item*:
4267 // In this case, the path will again always have the form
4268 // `a::b::foo::<T>` where only the final segment should have
4269 // type parameters. However, in this case, those parameters are
4270 // declared on a value, and hence are in the `FnSpace`.
4272 // 3. Reference to a *method*:
4274 // impl<A> SomeStruct<A> {
4278 // Here we can have a path like
4279 // `a::b::SomeStruct::<A>::foo::<B>`, in which case parameters
4280 // may appear in two places. The penultimate segment,
4281 // `SomeStruct::<A>`, contains parameters in TypeSpace, and the
4282 // final segment, `foo::<B>` contains parameters in fn space.
4284 // 4. Reference to an *associated const*:
4286 // impl<A> AnotherStruct<A> {
4287 // const FOO: B = BAR;
4290 // The path in this case will look like
4291 // `a::b::AnotherStruct::<A>::FOO`, so the penultimate segment
4292 // only will have parameters in TypeSpace.
4294 // The first step then is to categorize the segments appropriately.
4296 assert!(!segments.is_empty());
4298 let mut ufcs_associated = None;
4299 let mut segment_spaces: Vec<_>;
4301 // Case 1 and 1b. Reference to a *type* or *enum variant*.
4307 Def::AssociatedTy(..) |
4310 Def::TyParam(..) => {
4311 // Everything but the final segment should have no
4312 // parameters at all.
4313 segment_spaces = vec![None; segments.len() - 1];
4314 segment_spaces.push(Some(subst::TypeSpace));
4317 // Case 2. Reference to a top-level value.
4320 Def::Static(..) => {
4321 segment_spaces = vec![None; segments.len() - 1];
4322 segment_spaces.push(Some(subst::FnSpace));
4325 // Case 3. Reference to a method.
4326 Def::Method(def_id) => {
4327 let container = fcx.tcx().impl_or_trait_item(def_id).container();
4329 ty::TraitContainer(trait_did) => {
4330 callee::check_legal_trait_for_method_call(fcx.ccx, span, trait_did)
4332 ty::ImplContainer(_) => {}
4335 if segments.len() >= 2 {
4336 segment_spaces = vec![None; segments.len() - 2];
4337 segment_spaces.push(Some(subst::TypeSpace));
4338 segment_spaces.push(Some(subst::FnSpace));
4340 // `<T>::method` will end up here, and so can `T::method`.
4341 let self_ty = opt_self_ty.expect("UFCS sugared method missing Self");
4342 segment_spaces = vec![Some(subst::FnSpace)];
4343 ufcs_associated = Some((container, self_ty));
4347 Def::AssociatedConst(def_id) => {
4348 let container = fcx.tcx().impl_or_trait_item(def_id).container();
4350 ty::TraitContainer(trait_did) => {
4351 callee::check_legal_trait_for_method_call(fcx.ccx, span, trait_did)
4353 ty::ImplContainer(_) => {}
4356 if segments.len() >= 2 {
4357 segment_spaces = vec![None; segments.len() - 2];
4358 segment_spaces.push(Some(subst::TypeSpace));
4359 segment_spaces.push(None);
4361 // `<T>::CONST` will end up here, and so can `T::CONST`.
4362 let self_ty = opt_self_ty.expect("UFCS sugared const missing Self");
4363 segment_spaces = vec![None];
4364 ufcs_associated = Some((container, self_ty));
4368 // Other cases. Various nonsense that really shouldn't show up
4369 // here. If they do, an error will have been reported
4370 // elsewhere. (I hope)
4372 Def::ForeignMod(..) |
4377 segment_spaces = vec![None; segments.len()];
4380 assert_eq!(segment_spaces.len(), segments.len());
4382 // In `<T as Trait<A, B>>::method`, `A` and `B` are mandatory, but
4383 // `opt_self_ty` can also be Some for `Foo::method`, where Foo's
4384 // type parameters are not mandatory.
4385 let require_type_space = opt_self_ty.is_some() && ufcs_associated.is_none();
4387 debug!("segment_spaces={:?}", segment_spaces);
4389 // Next, examine the definition, and determine how many type
4390 // parameters we expect from each space.
4391 let type_defs = &type_scheme.generics.types;
4392 let region_defs = &type_scheme.generics.regions;
4394 // Now that we have categorized what space the parameters for each
4395 // segment belong to, let's sort out the parameters that the user
4396 // provided (if any) into their appropriate spaces. We'll also report
4397 // errors if type parameters are provided in an inappropriate place.
4398 let mut substs = Substs::empty();
4399 for (opt_space, segment) in segment_spaces.iter().zip(segments) {
4402 prohibit_type_params(fcx.tcx(), slice::ref_slice(segment));
4406 push_explicit_parameters_from_segment_to_substs(fcx,
4416 if let Some(self_ty) = opt_self_ty {
4417 if type_defs.len(subst::SelfSpace) == 1 {
4418 substs.types.push(subst::SelfSpace, self_ty);
4422 // Now we have to compare the types that the user *actually*
4423 // provided against the types that were *expected*. If the user
4424 // did not provide any types, then we want to substitute inference
4425 // variables. If the user provided some types, we may still need
4426 // to add defaults. If the user provided *too many* types, that's
4428 for &space in &[subst::SelfSpace, subst::TypeSpace, subst::FnSpace] {
4429 adjust_type_parameters(fcx, span, space, type_defs,
4430 require_type_space, &mut substs);
4431 assert_eq!(substs.types.len(space), type_defs.len(space));
4433 adjust_region_parameters(fcx, span, space, region_defs, &mut substs);
4434 assert_eq!(substs.regions().len(space), region_defs.len(space));
4437 // The things we are substituting into the type should not contain
4438 // escaping late-bound regions, and nor should the base type scheme.
4439 assert!(!substs.has_regions_escaping_depth(0));
4440 assert!(!type_scheme.has_escaping_regions());
4442 // Add all the obligations that are required, substituting and
4443 // normalized appropriately.
4444 let bounds = fcx.instantiate_bounds(span, &substs, &type_predicates);
4445 fcx.add_obligations_for_parameters(
4446 traits::ObligationCause::new(span, fcx.body_id, traits::ItemObligation(def.def_id())),
4449 // Substitute the values for the type parameters into the type of
4450 // the referenced item.
4451 let ty_substituted = fcx.instantiate_type_scheme(span, &substs, &type_scheme.ty);
4454 if let Some((ty::ImplContainer(impl_def_id), self_ty)) = ufcs_associated {
4455 // In the case of `Foo<T>::method` and `<Foo<T>>::method`, if `method`
4456 // is inherent, there is no `Self` parameter, instead, the impl needs
4457 // type parameters, which we can infer by unifying the provided `Self`
4458 // with the substituted impl type.
4459 let impl_scheme = fcx.tcx().lookup_item_type(impl_def_id);
4460 assert_eq!(substs.types.len(subst::TypeSpace),
4461 impl_scheme.generics.types.len(subst::TypeSpace));
4462 assert_eq!(substs.regions().len(subst::TypeSpace),
4463 impl_scheme.generics.regions.len(subst::TypeSpace));
4465 let impl_ty = fcx.instantiate_type_scheme(span, &substs, &impl_scheme.ty);
4466 if fcx.mk_subty(false, TypeOrigin::Misc(span), self_ty, impl_ty).is_err() {
4467 fcx.tcx().sess.span_bug(span,
4469 "instantiate_path: (UFCS) {:?} was a subtype of {:?} but now is not?",
4475 debug!("instantiate_path: type of {:?} is {:?}",
4478 fcx.write_ty(node_id, ty_substituted);
4479 fcx.write_substs(node_id, ty::ItemSubsts { substs: substs });
4482 /// Finds the parameters that the user provided and adds them to `substs`. If too many
4483 /// parameters are provided, then reports an error and clears the output vector.
4485 /// We clear the output vector because that will cause the `adjust_XXX_parameters()` later to
4486 /// use inference variables. This seems less likely to lead to derived errors.
4488 /// Note that we *do not* check for *too few* parameters here. Due to the presence of defaults
4489 /// etc that is more complicated. I wanted however to do the reporting of *too many* parameters
4490 /// here because we can easily use the precise span of the N+1'th parameter.
// NOTE(review): this excerpt is missing some original lines — e.g. the `span`
// used below does not appear in the visible signature, and closing braces for
// the match arms are absent. Comments only; code left byte-identical.
4491 fn push_explicit_parameters_from_segment_to_substs<'a, 'tcx>(
4492 fcx: &FnCtxt<'a, 'tcx>,
4493 space: subst::ParamSpace,
4495 type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4496 region_defs: &VecPerParamSpace<ty::RegionParameterDef>,
4497 segment: &hir::PathSegment,
4498 substs: &mut Substs<'tcx>)
// Dispatch on the syntactic form of the segment's parameter list.
4500 match segment.parameters {
// `Foo<A, B>` form: forward the explicit angle-bracketed arguments.
4501 hir::AngleBracketedParameters(ref data) => {
4502 push_explicit_angle_bracketed_parameters_from_segment_to_substs(
4503 fcx, space, type_defs, region_defs, data, substs);
// `Foo(A, B) -> C` sugar is only legal on trait paths, so report E0238 —
// but still process the parameters so later phases see something sensible.
4506 hir::ParenthesizedParameters(ref data) => {
4507 span_err!(fcx.tcx().sess, span, E0238,
4508 "parenthesized parameters may only be used with a trait");
4509 push_explicit_parenthesized_parameters_from_segment_to_substs(
4510 fcx, space, span, type_defs, data, substs);
/// Copies the explicitly supplied `<T, 'a, ...>` arguments of one path
/// segment into `substs`, reporting errors (without aborting) when the user
/// supplied more arguments than the definitions allow.
// NOTE(review): some original lines are elided in this excerpt (e.g. the
// `if i < type_count {` guard implied by the `} else if` below, and closing
// braces). Comments only; code left byte-identical.
4515 fn push_explicit_angle_bracketed_parameters_from_segment_to_substs<'a, 'tcx>(
4516 fcx: &FnCtxt<'a, 'tcx>,
4517 space: subst::ParamSpace,
4518 type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4519 region_defs: &VecPerParamSpace<ty::RegionParameterDef>,
4520 data: &hir::AngleBracketedParameterData,
4521 substs: &mut Substs<'tcx>)
4524 let type_count = type_defs.len(space);
// Precondition: nothing has been pushed into this space yet.
4525 assert_eq!(substs.types.len(space), 0);
4526 for (i, typ) in data.types.iter().enumerate() {
4527 let t = fcx.to_ty(&typ);
4529 substs.types.push(space, t);
// Report E0087 exactly once, at the first excess parameter; the precise
// span of that N+1'th parameter makes for a good diagnostic.
4530 } else if i == type_count {
4531 span_err!(fcx.tcx().sess, typ.span, E0087,
4532 "too many type parameters provided: \
4533 expected at most {} parameter{}, \
4534 found {} parameter{}",
4536 if type_count == 1 {""} else {"s"},
4538 if data.types.len() == 1 {""} else {"s"});
// Clear what we pushed so the later adjust pass falls back to
// inference variables (see the doc comment on the caller).
4539 substs.types.truncate(space, 0);
// Associated-item bindings (`Item = T`) are only meaningful in type paths,
// never in expression paths — report E0182 on the first one.
4545 if !data.bindings.is_empty() {
4546 span_err!(fcx.tcx().sess, data.bindings[0].span, E0182,
4547 "unexpected binding of associated item in expression path \
4548 (only allowed in type paths)");
// Same scheme for lifetimes: push while in bounds, report E0088 once on
// the first excess lifetime, then truncate to defer to inference.
4552 let region_count = region_defs.len(space);
4553 assert_eq!(substs.regions().len(space), 0);
4554 for (i, lifetime) in data.lifetimes.iter().enumerate() {
4555 let r = ast_region_to_region(fcx.tcx(), lifetime);
4556 if i < region_count {
4557 substs.mut_regions().push(space, r);
4558 } else if i == region_count {
4559 span_err!(fcx.tcx().sess, lifetime.span, E0088,
4560 "too many lifetime parameters provided: \
4561 expected {} parameter{}, found {} parameter{}",
4563 if region_count == 1 {""} else {"s"},
4564 data.lifetimes.len(),
4565 if data.lifetimes.len() == 1 {""} else {"s"});
4566 substs.mut_regions().truncate(space, 0);
// NOTE(review): the opening sentence of this doc comment (presumably
// "As with ...") is missing from this excerpt, as are some body lines
// (e.g. the condition guarding the E0167 report). Comments only.
4574 /// `push_explicit_angle_bracketed_parameters_from_segment_to_substs`,
4575 /// but intended for `Foo(A,B) -> C` form. This expands to
4576 /// roughly the same thing as `Foo<(A,B),C>`. One important
4577 /// difference has to do with the treatment of anonymous
4578 /// regions, which are translated into bound regions (NYI).
4579 fn push_explicit_parenthesized_parameters_from_segment_to_substs<'a, 'tcx>(
4580 fcx: &FnCtxt<'a, 'tcx>,
4581 space: subst::ParamSpace,
4583 type_defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4584 data: &hir::ParenthesizedParameterData,
4585 substs: &mut Substs<'tcx>)
4587 let type_count = type_defs.len(space);
// The sugar always desugars to exactly two type arguments (inputs tuple +
// output); E0167 fires when the definition expects a different count.
4589 span_err!(fcx.tcx().sess, span, E0167,
4590 "parenthesized form always supplies 2 type parameters, \
4591 but only {} parameter(s) were expected",
// The parenthesized inputs `(A, B)` become a single tuple type argument.
4595 let input_tys: Vec<Ty> =
4596 data.inputs.iter().map(|ty| fcx.to_ty(&ty)).collect();
4598 let tuple_ty = fcx.tcx().mk_tup(input_tys);
// Guarded pushes: only supply as many arguments as the definition can
// accept, so the error case above does not also corrupt `substs`.
4600 if type_count >= 1 {
4601 substs.types.push(space, tuple_ty);
// A missing `-> C` defaults the output type to unit (`()`).
4604 let output_ty: Option<Ty> =
4605 data.output.as_ref().map(|ty| fcx.to_ty(&ty));
4608 output_ty.unwrap_or(fcx.tcx().mk_nil());
4610 if type_count >= 2 {
4611 substs.types.push(space, output_ty);
/// Completes the type-parameter portion of `substs` for one param space:
/// supplies inference variables when nothing was written, reports E0089 and
/// substitutes `err` types when too few were written, and otherwise fills in
/// the declared defaults for any trailing omitted parameters.
// NOTE(review): several original lines are elided in this excerpt (e.g. the
// `span`/`space` parameters used below are not visible in the signature, the
// `.count()` terminating `required_len`, and early `return`s after the first
// two cases). Comments only; code left byte-identical.
4615 fn adjust_type_parameters<'a, 'tcx>(
4616 fcx: &FnCtxt<'a, 'tcx>,
4619 defs: &VecPerParamSpace<ty::TypeParameterDef<'tcx>>,
4620 require_type_space: bool,
4621 substs: &mut Substs<'tcx>)
4623 let provided_len = substs.types.len(space);
4624 let desired = defs.get_slice(space);
// Parameters without defaults form a required prefix; defaults may only
// trail, so `take_while` counts the mandatory ones.
4625 let required_len = desired.iter()
4626 .take_while(|d| d.default.is_none())
4629 debug!("adjust_type_parameters(space={:?}, \
4638 // Enforced by `push_explicit_parameters_from_segment_to_substs()`.
4639 assert!(provided_len <= desired.len());
4641 // Nothing specified at all: supply inference variables for
4643 if provided_len == 0 && !(require_type_space && space == subst::TypeSpace) {
4644 substs.types.replace(space, Vec::new());
4645 fcx.infcx().type_vars_for_defs(span, space, substs, &desired[..]);
4649 // Too few parameters specified: report an error and use Err
4651 if provided_len < required_len {
// "at least" only when there are also optional (defaulted) parameters.
4653 if desired.len() != required_len { "at least " } else { "" };
4654 span_err!(fcx.tcx().sess, span, E0089,
4655 "too few type parameters provided: expected {}{} parameter{}, \
4656 found {} parameter{}",
4657 qualifier, required_len,
4658 if required_len == 1 {""} else {"s"},
4660 if provided_len == 1 {""} else {"s"});
// Fill with the error type so downstream passes don't derive
// spurious follow-on errors from missing substitutions.
4661 substs.types.replace(space, vec![fcx.tcx().types.err; desired.len()]);
4665 // Otherwise, add in any optional parameters that the user
4666 // omitted. The case of *too many* parameters is handled
4668 // push_explicit_parameters_from_segment_to_substs(). Note
4669 // that the *default* type are expressed in terms of all prior
4670 // parameters, so we have to substitute as we go with the
4671 // partial substitution that we have built up.
4672 for i in provided_len..desired.len() {
// Safe to unwrap: everything past `required_len` has a default.
4673 let default = desired[i].default.unwrap();
4674 let default = default.subst_spanned(fcx.tcx(), substs, Some(span));
4675 substs.types.push(space, default);
4677 assert_eq!(substs.types.len(space), desired.len());
4679 debug!("Final substs: {:?}", substs);
/// Region analogue of `adjust_type_parameters`: accepts an exact match,
/// and otherwise (none provided, or too few provided — the latter with an
/// E0090 error) replaces the whole space with fresh region inference
/// variables. Regions have no defaults, so there is no fill-in case.
// NOTE(review): this excerpt elides several original lines (e.g. the
// `fcx`/`span`/`space` parameters used below are not visible in the
// signature, and early `return`s). Comments only; code left byte-identical.
4682 fn adjust_region_parameters(
4686 defs: &VecPerParamSpace<ty::RegionParameterDef>,
4687 substs: &mut Substs)
4689 let provided_len = substs.mut_regions().len(space);
4690 let desired = defs.get_slice(space);
4692 // Enforced by `push_explicit_parameters_from_segment_to_substs()`.
4693 assert!(provided_len <= desired.len());
4695 // If nothing was provided, just use inference variables.
4696 if provided_len == 0 {
4697 substs.mut_regions().replace(
4699 fcx.infcx().region_vars_for_defs(span, desired));
4703 // If just the right number were provided, everybody is happy.
4704 if provided_len == desired.len() {
4708 // Otherwise, too few were provided. Report an error and then
4709 // use inference variables.
4710 span_err!(fcx.tcx().sess, span, E0090,
4711 "too few lifetime parameters provided: expected {} parameter{}, \
4712 found {} parameter{}",
4714 if desired.len() == 1 {""} else {"s"},
4716 if provided_len == 1 {""} else {"s"});
// Discard the partial user-provided regions entirely; inference
// variables are less likely to cause cascading region errors.
4718 substs.mut_regions().replace(
4720 fcx.infcx().region_vars_for_defs(span, desired));
/// Resolves `ty` as far as current inference allows; when it is still an
/// unresolved type variable, consults the fallback closure `f`. If the
/// fallback also yields an unknown/error type, emits "type of this value
/// must be known in this context" and forces the type to `err`; otherwise
/// the fallback type is equated (via suptype) with the variable.
// NOTE(review): the parameter list and several control-flow lines (the
// `if ty.is_ty_var()` guard implied by this body, closing braces/returns)
// are elided in this excerpt. Comments only; code left byte-identical.
4724 fn structurally_resolve_type_or_else<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
4728 where F: Fn() -> Ty<'tcx>
4730 let mut ty = fcx.resolve_type_vars_if_possible(ty);
// Ask the fallback for an alternative type to use.
4733 let alternative = f();
// Fallback failed too: report, then pin the variable to `err` so the
// same ambiguity isn't reported repeatedly downstream.
4736 if alternative.is_ty_var() || alternative.references_error() {
4737 fcx.type_error_message(sp, |_actual| {
4738 "the type of this value must be known in this context".to_string()
4740 demand::suptype(fcx, sp, fcx.tcx().types.err, ty);
4741 ty = fcx.tcx().types.err;
// Fallback produced something concrete: unify it with the variable.
4743 demand::suptype(fcx, sp, alternative, ty);
4751 // Resolves `typ` by a single level if `typ` is a type variable. If no
4752 // resolution is possible, then an error is reported.
// Thin wrapper over `structurally_resolve_type_or_else` with a fallback
// closure whose body is elided in this excerpt (NOTE(review): presumably
// it reports the ambiguity and yields `err` — confirm against full source).
4753 pub fn structurally_resolved_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
4758 structurally_resolve_type_or_else(fcx, sp, ty, || {
4763 // Returns true if b contains a break that can exit from b
// NOTE(review): several lines of this function (match scaffolding, closing
// braces, the combinator joining the two queries) are elided in this
// excerpt. Comments only; code left byte-identical.
4764 pub fn may_break(cx: &TyCtxt, id: ast::NodeId, b: &hir::Block) -> bool {
4765 // First: is there an unlabeled break immediately
// (i.e. a plain `break` whose nearest enclosing loop is this one)
4767 (loop_query(&b, |e| {
4769 hir::ExprBreak(None) => true,
4773 // Second: is there a labeled break with label
4774 // <id> nested anywhere inside the loop?
4775 (block_query(b, |e| {
// Resolve the break's label and compare it against this loop's NodeId.
4776 if let hir::ExprBreak(Some(_)) = e.node {
4777 lookup_full_def(cx, e.span, e.id) == Def::Label(id)
4784 pub fn check_bounds_are_used<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
4785 tps: &[hir::TyParam],
4787 debug!("check_bounds_are_used(n_tps={}, ty={:?})",
4790 // make a vector of booleans initially false, set to true when used
4791 if tps.is_empty() { return; }
4792 let mut tps_used = vec![false; tps.len()];
4794 for leaf_ty in ty.walk() {
4795 if let ty::TyParam(ParamTy {idx, ..}) = leaf_ty.sty {
4796 debug!("Found use of ty param num {}", idx);
4797 tps_used[idx as usize] = true;
4801 for (i, b) in tps_used.iter().enumerate() {
4803 span_err!(ccx.tcx.sess, tps[i].span, E0091,
4804 "type parameter `{}` is unused",