1 // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
12 * Handles translation of callees as well as other call-related
13 * things. Callees are a superset of normal rust values and sometimes
14 * have different representations. In particular, top-level fn items
15 * and methods are represented as just a fn ptr and not a full
19 use arena::TypedArena;
23 use llvm::{ValueRef, get_param};
25 use metadata::csearch;
28 use middle::subst::{Subst};
29 use middle::trans::adt;
30 use middle::trans::base;
31 use middle::trans::base::*;
32 use middle::trans::build::*;
33 use middle::trans::callee;
34 use middle::trans::cleanup;
35 use middle::trans::cleanup::CleanupMethods;
36 use middle::trans::closure;
37 use middle::trans::common;
38 use middle::trans::common::*;
39 use middle::trans::datum::*;
40 use middle::trans::expr;
41 use middle::trans::glue;
42 use middle::trans::inline;
43 use middle::trans::foreign;
44 use middle::trans::intrinsic;
45 use middle::trans::meth;
46 use middle::trans::monomorphize;
47 use middle::trans::type_::Type;
48 use middle::trans::type_of;
50 use middle::typeck::coherence::make_substs_for_receiver_types;
51 use middle::typeck::MethodCall;
52 use util::ppaux::Repr;
53 use util::ppaux::ty_to_string;
55 use syntax::abi as synabi;
// Payload for a translated method callee. NOTE(review): the struct's
// fields are elided from this excerpt, but `trans_call_inner` below reads
// `d.llfn` and `d.llself`, so it at least carries the fn pointer and the
// receiver value -- confirm against the full definition.
60 pub struct MethodData {

// Variants of the callee-data enum (the enum header itself is elided).
// `trans_call_inner` matches on these to decide how to emit the call.

// A Rust closure value, held as an lvalue datum; the underlying LLVM
// value is an (llfn, llenv) pair that gets split apart at the call site.
66 Closure(Datum<Lvalue>),
68 // Constructor for enum variant/tuple-like-struct
70 NamedTupleConstructor(subst::Substs, ty::Disr),
72 // Represents a (possibly monomorphized) top-level fn item or method
73 // item. Note that this is just the fn-ptr and is not a Rust closure
74 // value (which is a pair).
75 Fn(/* llfn */ ValueRef),

// A Rust intrinsic, identified by its local NodeId plus the substs of the
// use site; translated in place by intrinsic::trans_intrinsic_call rather
// than through a real function call.
77 Intrinsic(ast::NodeId, subst::Substs),
// A callee paired with the block in which its translation finished
// (argument evaluation may have extended the block). NOTE(review): the
// `data: CalleeData`-style field is elided from this excerpt -- confirm;
// `trans_call_inner` reads `callee.bcx` and `callee.data`.
82 pub struct Callee<'blk, 'tcx: 'blk> {
83 pub bcx: Block<'blk, 'tcx>,
// Translate a callable expression into a `Callee`. Path expressions that
// name a definition are handled specially via `trans_def`; everything
// else is evaluated as a value and treated as a closure.
87 fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr)
88 -> Callee<'blk, 'tcx> {
89 let _icx = push_ctxt("trans_callee");
90 debug!("callee::trans(expr={})", expr.repr(bcx.tcx()));
92 // pick out special kinds of expressions that can be called:
95 return trans_def(bcx, bcx.def(expr.id), expr);
100 // any other expressions are closures:
101 return datum_callee(bcx, expr);
// Translate an arbitrary expression to a callee by evaluating it to a
// datum and inspecting its type: bare fns become a raw fn-ptr callee,
// closure types become a `Closure` lvalue callee, and anything else is a
// compiler bug (type checking should have rejected it).
103 fn datum_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr)
104 -> Callee<'blk, 'tcx> {
105 let DatumBlock {mut bcx, datum} = expr::trans(bcx, expr);
106 match ty::get(datum.ty).sty {
107 ty::ty_bare_fn(..) => {
// A bare fn is a single scalar (the fn pointer).
108 let llval = datum.to_llscalarish(bcx);
114 ty::ty_closure(..) => {
// Closures must be lvalues so the (fn, env) pair can be loaded
// out by-ref at the call site.
115 let datum = unpack_datum!(
116 bcx, datum.to_lvalue_datum(bcx, "callee", expr.id));
119 data: Closure(datum),
// Unreachable for well-typed input: report an ICE with the type.
123 bcx.tcx().sess.span_bug(
125 format!("type of callee is neither bare-fn nor closure: \
127 bcx.ty_to_string(datum.ty)).as_slice());
// Wrap a raw LLVM fn pointer as a `Callee` (body elided in this excerpt;
// presumably constructs `Callee { bcx, data: Fn(llfn) }` -- confirm).
132 fn fn_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, llfn: ValueRef)
133 -> Callee<'blk, 'tcx> {
// Translate a reference to a definition (resolved path) into a `Callee`.
// Dispatches on the kind of def: tuple-struct and variant constructors
// become `NamedTupleConstructor`, intrinsics become `Intrinsic`, ordinary
// fns and impl methods become fn pointers, trait methods go through the
// static-method machinery, locals/upvars fall back to `datum_callee`,
// and non-callable defs are an ICE.
140 fn trans_def<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
142 ref_expr: &ast::Expr)
143 -> Callee<'blk, 'tcx> {
144 debug!("trans_def(def={}, ref_expr={})", def.repr(bcx.tcx()), ref_expr.repr(bcx.tcx()));
145 let expr_ty = node_id_type(bcx, ref_expr.id);
// Guard: a DefFn that is actually a tuple-struct constructor (detected
// by looking up the (possibly inlined) AST node).
147 def::DefFn(did, _) if {
148 let maybe_def_id = inline::get_local_instance(bcx.ccx(), did);
149 let maybe_ast_node = maybe_def_id.and_then(|def_id| bcx.tcx().map
151 match maybe_ast_node {
152 Some(ast_map::NodeStructCtor(_)) => true,
156 let substs = node_id_substs(bcx, ExprId(ref_expr.id));
// Structs have a single "variant" with discriminant 0.
159 data: NamedTupleConstructor(substs, 0)
// Guard: a DefFn whose type says it is a Rust intrinsic.
162 def::DefFn(did, _) if match ty::get(expr_ty).sty {
163 ty::ty_bare_fn(ref f) => f.abi == synabi::RustIntrinsic,
166 let substs = node_id_substs(bcx, ExprId(ref_expr.id));
// Intrinsics are referenced by their local (possibly inlined) id.
167 let def_id = inline::maybe_instantiate_inline(bcx.ccx(), did);
168 Callee { bcx: bcx, data: Intrinsic(def_id.node, substs) }
// Plain fns and methods defined in an impl: just take the fn pointer.
170 def::DefFn(did, _) | def::DefMethod(did, _, def::FromImpl(_)) |
171 def::DefStaticMethod(did, def::FromImpl(_)) => {
172 fn_callee(bcx, trans_fn_ref(bcx, did, ExprId(ref_expr.id)))
// Methods provided through a trait: resolve to the concrete impl.
174 def::DefStaticMethod(meth_did, def::FromTrait(trait_did)) |
175 def::DefMethod(meth_did, _, def::FromTrait(trait_did)) => {
176 fn_callee(bcx, meth::trans_static_method_callee(bcx, meth_did,
180 def::DefVariant(tid, vid, _) => {
181 let vinfo = ty::enum_variant_with_id(bcx.tcx(), tid, vid);
182 let substs = node_id_substs(bcx, ExprId(ref_expr.id));
184 // Nullary variants are not callable
185 assert!(vinfo.args.len() > 0u);
189 data: NamedTupleConstructor(substs, vinfo.disr_val)
192 def::DefStruct(_) => {
193 let substs = node_id_substs(bcx, ExprId(ref_expr.id));
196 data: NamedTupleConstructor(substs, 0)
// Locals and captured upvars hold closure/fn values: evaluate them.
202 def::DefUpvar(..) => {
203 datum_callee(bcx, ref_expr)
// Everything below names something that is not callable at all.
205 def::DefMod(..) | def::DefForeignMod(..) | def::DefTrait(..) |
206 def::DefTy(..) | def::DefPrimTy(..) | def::DefAssociatedTy(..) |
207 def::DefUse(..) | def::DefTyParamBinder(..) |
208 def::DefRegion(..) | def::DefLabel(..) | def::DefTyParam(..) |
209 def::DefSelfTy(..) => {
210 bcx.tcx().sess.span_bug(
212 format!("cannot translate def {} \
213 to a callable thing!", def).as_slice());
// Thin convenience wrapper: computes the substitutions for the use site
// and delegates to `trans_fn_ref_with_substs`.
219 pub fn trans_fn_ref(bcx: Block, def_id: ast::DefId, node: ExprOrMethodCall) -> ValueRef {
221      * Translates a reference (with id `ref_id`) to the fn/method
222      * with id `def_id` into a function pointer. This may require
223      * monomorphization or inlining.
226     let _icx = push_ctxt("trans_fn_ref");
228     let substs = node_id_substs(bcx, node);
229     debug!("trans_fn_ref(def_id={}, node={}, substs={})",
230            def_id.repr(bcx.tcx()),
232            substs.repr(bcx.tcx()));
233     trans_fn_ref_with_substs(bcx, def_id, node, substs)
// Like `trans_fn_ref_with_substs`, but packages the resulting fn pointer
// up as a `Callee` with `Fn(...)` data (tail of the body elided here).
236 fn trans_fn_ref_with_substs_to_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
239                                                   substs: subst::Substs)
240                                                   -> Callee<'blk, 'tcx> {
243         data: Fn(trans_fn_ref_with_substs(bcx,
250 /// Translates the adapter that deconstructs a `Box<Trait>` object into
251 /// `Trait` so that a by-value self method can be called.
//
// The shim takes `Box<Self>`, loads/copies the value out of the box,
// frees the box, and forwards to the real (shimmed) method. Returns the
// shim's LLVM function (final lines elided in this excerpt -- confirm).
252 pub fn trans_unboxing_shim(bcx: Block,
253                            llshimmedfn: ValueRef,
255                            method_id: ast::DefId,
256                            substs: subst::Substs)
258     let _icx = push_ctxt("trans_unboxing_shim");
262     // Transform the self type to `Box<self_type>`.
263     let self_type = fty.sig.inputs[0];
264     let boxed_self_type = ty::mk_uniq(tcx, self_type);
// Build the shim's own signature: identical to the target's, except the
// receiver (input 0) is boxed.
265     let boxed_function_type = ty::FnSig {
266         binder_id: fty.sig.binder_id,
267         inputs: fty.sig.inputs.iter().enumerate().map(|(i, typ)| {
274         output: fty.sig.output,
277     let boxed_function_type = ty::BareFnTy {
278         fn_style: fty.fn_style,
280         sig: boxed_function_type,
282     let boxed_function_type =
283         ty::mk_bare_fn(tcx, boxed_function_type).subst(tcx, &substs);
284     let function_type = match fty.abi {
285         synabi::RustCall => {
286             // We're passing through to a RustCall ABI function, but
287             // because the shim will already perform untupling, we
288             // need to pretend the shimmed function does not use
289             // RustCall so the untupled arguments can be passed
290             // through verbatim. This is kind of ugly.
291             let fake_ty = ty::FnSig {
292                 binder_id: fty.sig.binder_id,
293                 inputs: type_of::untuple_arguments_if_necessary(ccx,
294                     fty.sig.inputs.as_slice(),
296                 output: fty.sig.output,
299             let fake_ty = ty::BareFnTy {
300                 fn_style: fty.fn_style,
304             ty::mk_bare_fn(tcx, fake_ty).subst(tcx, &substs)
307             ty::mk_bare_fn(tcx, (*fty).clone()).subst(tcx, &substs)
// Declare the shim itself with a mangled internal name.
311     let function_name = ty::with_path(tcx, method_id, |path| {
312         link::mangle_internal_name_by_path_and_seq(path, "unboxing_shim")
314     let llfn = decl_internal_rust_fn(ccx,
316                                      function_name.as_slice());
// Set up a fresh function context/entry block for the shim's body.
318     let block_arena = TypedArena::new();
319     let empty_param_substs = param_substs::empty();
320     let return_type = ty::ty_fn_ret(boxed_function_type);
321     let fcx = new_fn_ctxt(ccx,
329     let mut bcx = init_function(&fcx, false, return_type);
331     // Create the substituted versions of the self type.
332     let arg_scope = fcx.push_custom_cleanup_scope();
333     let arg_scope_id = cleanup::CustomScope(arg_scope);
334     let boxed_arg_types = ty::ty_fn_args(boxed_function_type);
335     let boxed_self_type = boxed_arg_types[0];
336     let arg_types = ty::ty_fn_args(function_type);
337     let self_type = arg_types[0];
338     let boxed_self_kind = arg_kind(&fcx, boxed_self_type);
340     // Create a datum for self.
341     let llboxedself = get_param(fcx.llfn, fcx.arg_pos(0) as u32);
342     let llboxedself = Datum::new(llboxedself,
347     llboxedself.to_lvalue_datum_in_scope(bcx,
351     // This `Load` is needed because lvalue data are always by-ref.
352     let llboxedself = Load(bcx, boxed_self.val);
// Unbox: immediates are loaded out of the box; larger values are
// memcpy'd into a fresh stack slot.
354     let llself = if type_is_immediate(ccx, self_type) {
355         let llboxedself = Load(bcx, llboxedself);
356         immediate_rvalue(llboxedself, self_type)
358         let llself = rvalue_scratch_datum(bcx, self_type, "self");
359         memcpy_ty(bcx, llself.val, llboxedself, self_type);
363     // Make sure we don't free the box twice!
364     boxed_self.kind.post_store(bcx, boxed_self.val, boxed_self_type);
366     // Schedule a cleanup to free the box.
367     fcx.schedule_free_value(arg_scope_id,
369                             cleanup::HeapExchange,
372     // Now call the function.
// Forward the unboxed self plus the remaining params verbatim.
373     let mut llshimmedargs = vec!(llself.val);
374     for i in range(1, arg_types.len()) {
375         llshimmedargs.push(get_param(fcx.llfn, fcx.arg_pos(i) as u32));
377     assert!(!fcx.needs_ret_allocas);
378     let dest = fcx.llretslotptr.get().map(|_|
379         expr::SaveIn(fcx.get_ret_slot(bcx, return_type, "ret_slot"))
381     bcx = trans_call_inner(bcx,
387                                data: Fn(llshimmedfn),
390                            ArgVals(llshimmedargs.as_slice()),
// Run the scheduled cleanups (freeing the box) and finish the fn.
393     bcx = fcx.pop_and_trans_custom_cleanup_scope(bcx, arg_scope);
394     finish_fn(&fcx, bcx, return_type);
399 pub fn trans_fn_ref_with_substs(
401     def_id: ast::DefId,             // def id of fn
402     node: ExprOrMethodCall,         // node id of use of fn; may be zero if N/A
403     substs: subst::Substs)          // vtables for the call
407      * Translates a reference to a fn/method item, monomorphizing and
408      * inlining as it goes.
412      * - `bcx`: the current block where the reference to the fn occurs
413      * - `def_id`: def id of the fn or method item being referenced
414      * - `node`: node id of the reference to the fn/method, if applicable.
415      *   This parameter may be zero; but, if so, the resulting value may not
416      *   have the right type, so it must be cast before being used.
417      * - `substs`: values for each of the fn/method's parameters
420     let _icx = push_ctxt("trans_fn_ref_with_substs");
424     debug!("trans_fn_ref_with_substs(bcx={}, def_id={}, node={}, \
// Inference must be complete by trans time.
431     assert!(substs.types.all(|t| !ty::type_needs_infer(*t)));
433     // Load the info for the appropriate trait if necessary.
434     match ty::trait_of_item(tcx, def_id) {
437             ty::populate_implementations_for_trait_if_necessary(tcx, trait_id)
441     // We need to do a bunch of special handling for default methods.
442     // We need to modify the def_id and our substs in order to monomorphize
444     let (is_default, def_id, substs) = match ty::provided_source(tcx, def_id) {
445         None => (false, def_id, substs),
447             // There are two relevant substitutions when compiling
448             // default methods. First, there is the substitution for
449             // the type parameters of the impl we are using and the
450             // method we are calling. This substitution is the substs
451             // argument we already have.
452             // In order to compile a default method, though, we need
453             // to consider another substitution: the substitution for
454             // the type parameters on trait; the impl we are using
455             // implements the trait at some particular type
456             // parameters, and we need to substitute for those first.
457             // So, what we need to do is find this substitution and
458             // compose it with the one we already have.
460             let impl_id = ty::impl_or_trait_item(tcx, def_id).container()
462             let impl_or_trait_item = ty::impl_or_trait_item(tcx, source_id);
463             match impl_or_trait_item {
464                 ty::MethodTraitItem(method) => {
465                     let trait_ref = ty::impl_trait_ref(tcx, impl_id)
466                         .expect("could not find trait_ref for impl with \
469                     // Compute the first substitution
470                     let first_subst = make_substs_for_receiver_types(
471                         tcx, &*trait_ref, &*method);
474                     let new_substs = first_subst.subst(tcx, &substs);
476                     debug!("trans_fn_with_vtables - default method: \
477                             substs = {}, trait_subst = {}, \
478                             first_subst = {}, new_subst = {}",
479                            substs.repr(tcx), trait_ref.substs.repr(tcx),
480                            first_subst.repr(tcx), new_substs.repr(tcx));
482                     (true, source_id, new_substs)
484                 ty::TypeTraitItem(_) => {
485                     bcx.tcx().sess.bug("trans_fn_ref_with_vtables() tried \
486                                         to translate an associated type?!")
492     // If this is an unboxed closure, redirect to it.
493     match closure::get_or_create_declaration_if_unboxed_closure(bcx,
497         Some(llfn) => return llfn,
500     // Check whether this fn has an inlined copy and, if so, redirect
501     // def_id to the local id of the inlined copy.
502     let def_id = inline::maybe_instantiate_inline(ccx, def_id);
504     // We must monomorphise if the fn has type parameters, is a default method,
505     // or is a named tuple constructor.
506     let must_monomorphise = if !substs.types.is_empty() || is_default {
508     } else if def_id.krate == ast::LOCAL_CRATE {
509         let map_node = session::expect(
511             tcx.map.find(def_id.node),
512             || "local item should be in ast map".to_string());
// Tuple-struct and tuple-variant constructors are generated by trans,
// so they always go through the monomorphization path.
515             ast_map::NodeVariant(v) => match v.node.kind {
516                 ast::TupleVariantKind(ref args) => args.len() > 0,
519             ast_map::NodeStructCtor(_) => true,
526     // Create a monomorphic version of generic functions
527     if must_monomorphise {
528         // Should be either intra-crate or inlined.
529         assert_eq!(def_id.krate, ast::LOCAL_CRATE);
// `ExprId(0)` means "no use site"; see the doc comment above.
531         let opt_ref_id = match node {
532             ExprId(id) => if id != 0 { Some(id) } else { None },
533             MethodCall(_) => None,
536         let (val, must_cast) =
537             monomorphize::monomorphic_fn(ccx, def_id, &substs, opt_ref_id);
539         if must_cast && node != ExprId(0) {
540             // Monotype of the REFERENCE to the function (type params
542             let ref_ty = match node {
543                 ExprId(id) => node_id_type(bcx, id),
544                 MethodCall(method_call) => {
545                     let t = (*bcx.tcx().method_map.borrow())[method_call].ty;
546                     monomorphize_type(bcx, t)
// Cast the monomorphized fn to the pointer type the use site expects.
551                 bcx, val, type_of::type_of_fn_from_ty(ccx, ref_ty).ptr_to());
// Non-monomorphized path: look the item up directly.
556     // Polytype of the function item (may have type params)
557     let fn_tpt = ty::lookup_item_type(tcx, def_id);
559     // Find the actual function pointer.
561         if def_id.krate == ast::LOCAL_CRATE {
562             // Internal reference.
563             get_item_val(ccx, def_id.node)
565             // External reference.
566             trans_external_path(ccx, def_id, fn_tpt.ty)
570     // This is subtle and surprising, but sometimes we have to bitcast
571     // the resulting fn pointer.  The reason has to do with external
572     // functions.  If you have two crates that both bind the same C
573     // library, they may not use precisely the same types: for
574     // example, they will probably each declare their own structs,
575     // which are distinct types from LLVM's point of view (nominal
578     // Now, if those two crates are linked into an application, and
579     // they contain inlined code, you can wind up with a situation
580     // where both of those functions wind up being loaded into this
581     // application simultaneously. In that case, the same function
582     // (from LLVM's point of view) requires two types. But of course
583     // LLVM won't allow one function to have two types.
585     // What we currently do, therefore, is declare the function with
586     // one of the two types (whichever happens to come first) and then
587     // bitcast as needed when the function is referenced to make sure
588     // it has the type we expect.
590     // This can occur on either a crate-local or crate-external
591     // reference. It also occurs when testing libcore and in some
592     // other weird situations. Annoying.
593     let llty = type_of::type_of_fn_from_ty(ccx, fn_tpt.ty);
594     let llptrty = llty.ptr_to();
595     if val_ty(val) != llptrty {
596         debug!("trans_fn_ref_with_vtables(): casting pointer!");
597         val = BitCast(bcx, val, llptrty);
599         debug!("trans_fn_ref_with_vtables(): not casting pointer!");
605 // ______________________________________________________________________
// Entry point for translating an ordinary call expression `f(args)`:
// delegates to `trans_call_inner` with a closure that translates the
// callee expression `f`.
608 pub fn trans_call<'blk, 'tcx>(in_cx: Block<'blk, 'tcx>,
613                               -> Block<'blk, 'tcx> {
614     let _icx = push_ctxt("trans_call");
615     trans_call_inner(in_cx,
616                      Some(common::expr_info(call_ex)),
618                      |cx, _| trans(cx, f),
// Entry point for translating a method call `rcvr.m(args)`: looks up the
// resolved method in the method map and delegates to `trans_call_inner`
// with a closure that translates the method callee (receiver included).
623 pub fn trans_method_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
628                                      -> Block<'blk, 'tcx> {
629     let _icx = push_ctxt("trans_method_call");
630     debug!("trans_method_call(call_ex={})", call_ex.repr(bcx.tcx()));
631     let method_call = MethodCall::expr(call_ex.id);
632     let method_ty = (*bcx.tcx().method_map.borrow())[method_call].ty;
635                      Some(common::expr_info(call_ex)),
636                      monomorphize_type(bcx, method_ty),
637                      |cx, arg_cleanup_scope| {
638                          meth::trans_method_callee(cx, method_call, Some(rcvr), arg_cleanup_scope)
// Translate a call to a lang item (`did`) with pre-evaluated LLVM
// arguments and empty substitutions. The fn type comes from the local
// AST when the item is crate-local, otherwise from crate metadata.
// Note: this is the path that may pass `dest == None` to
// `trans_call_inner` (see its doc comment).
644 pub fn trans_lang_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
647                                    dest: Option<expr::Dest>)
648                                    -> Result<'blk, 'tcx> {
649     let fty = if did.krate == ast::LOCAL_CRATE {
650         ty::node_id_to_type(bcx.tcx(), did.node)
652         csearch::get_type(bcx.tcx(), did).ty
654     callee::trans_call_inner(bcx,
658                                  trans_fn_ref_with_substs_to_callee(bcx,
661                                                                     subst::Substs::empty())
667 pub fn trans_call_inner<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
668                                     call_info: Option<NodeInfo>,
670                                     get_callee: |bcx: Block<'blk, 'tcx>,
671                                                  arg_cleanup_scope: cleanup::ScopeId|
672                                                  -> Callee<'blk, 'tcx>,
674                                     dest: Option<expr::Dest>)
675                                     -> Result<'blk, 'tcx> {
677      * This behemoth of a function translates function calls.
678      * Unfortunately, in order to generate more efficient LLVM
679      * output at -O0, it has quite a complex signature (refactoring
680      * this into two functions seems like a good idea).
682      * In particular, for lang items, it is invoked with a dest of
683      * None, and in that case the return value contains the result of
684      * the fn. The lang item must not return a structural type or else
685      * all heck breaks loose.
687      * For non-lang items, `dest` is always Some, and hence the result
688      * is written into memory somewhere. Nonetheless we return the
689      * actual return value of the function.
692     // Introduce a temporary cleanup scope that will contain cleanups
693     // for the arguments while they are being evaluated. The purpose
694     // this cleanup is to ensure that, should a panic occur while
695     // evaluating argument N, the values for arguments 0...N-1 are all
696     // cleaned up. If no panic occurs, the values are handed off to
697     // the callee, and hence none of the cleanups in this temporary
698     // scope will ever execute.
701     let arg_cleanup_scope = fcx.push_custom_cleanup_scope();
// The callee is translated inside the arg cleanup scope, since
// evaluating it (e.g. an autoref'd receiver) may schedule cleanups.
703     let callee = get_callee(bcx, cleanup::CustomScope(arg_cleanup_scope));
704     let mut bcx = callee.bcx;
706     let (abi, ret_ty) = match ty::get(callee_ty).sty {
707         ty::ty_bare_fn(ref f) => (f.abi, f.sig.output),
708         ty::ty_closure(ref f) => (f.abi, f.sig.output),
709         _ => panic!("expected bare rust fn or closure in trans_call_inner")
// Split the callee into (fn ptr, optional env ptr, optional self ptr)
// depending on its representation.
712     let (llfn, llenv, llself) = match callee.data {
717             (d.llfn, None, Some(d.llself))
720             // Closures are represented as (llfn, llclosure) pair:
721             // load the requisite values out.
722             let pair = d.to_llref();
723             let llfn = GEPi(bcx, pair, [0u, abi::fn_field_code]);
724             let llfn = Load(bcx, llfn);
725             let llenv = GEPi(bcx, pair, [0u, abi::fn_field_box]);
726             let llenv = Load(bcx, llenv);
727             (llfn, Some(llenv), None)
// Intrinsics never become real calls; translate in place and return.
729         Intrinsic(node, substs) => {
730             assert!(abi == synabi::RustIntrinsic);
731             assert!(dest.is_some());
733             let call_info = call_info.expect("no call info for intrinsic call?");
734             return intrinsic::trans_intrinsic_call(bcx, node, callee_ty,
735                                                    arg_cleanup_scope, args,
736                                                    dest.unwrap(), substs,
// Tuple-struct / variant constructors likewise short-circuit.
739         NamedTupleConstructor(substs, disr) => {
740             assert!(dest.is_some());
741             fcx.pop_custom_cleanup_scope(arg_cleanup_scope);
743             let ctor_ty = callee_ty.subst(bcx.tcx(), &substs);
744             return base::trans_named_tuple_constructor(bcx,
753     // Intrinsics should not become actual functions.
754     // We trans them in place in `trans_intrinsic_call`
755     assert!(abi != synabi::RustIntrinsic);
757     let is_rust_fn = abi == synabi::Rust || abi == synabi::RustCall;
759     // Generate a location to store the result. If the user does
760     // not care about the result, just make a stack slot.
761     let opt_llretslot = dest.and_then(|dest| match dest {
762         expr::SaveIn(dst) => Some(dst),
764             let ret_ty = match ret_ty {
765                 ty::FnConverging(ret_ty) => ret_ty,
766                 ty::FnDiverging => ty::mk_nil()
769                type_of::return_uses_outptr(ccx, ret_ty) ||
770                ty::type_needs_drop(bcx.tcx(), ret_ty) {
771                 // Push the out-pointer if we use an out-pointer for this
772                 // return type, otherwise push "undef".
773                 if type_is_zero_size(ccx, ret_ty) {
774                     let llty = type_of::type_of(ccx, ret_ty);
775                     Some(C_undef(llty.ptr_to()))
777                     Some(alloc_ty(bcx, ret_ty, "__llret"))
// Default result value; overwritten on the Rust-fn path below.
785     let mut llresult = unsafe {
786         llvm::LLVMGetUndef(Type::nil(ccx).ptr_to().to_ref())
789     // The code below invokes the function, using either the Rust
790     // conventions (if it is a rust fn) or the native conventions
791     // (otherwise). The important part is that, when all is said
792     // and done, either the return value of the function will have been
793     // written in opt_llretslot (if it is Some) or `llresult` will be
794     // set appropriately (otherwise).
796         let mut llargs = Vec::new();
// Rust calling convention: out-pointer first, then env/self, then args.
798         if let (ty::FnConverging(ret_ty), Some(llretslot)) = (ret_ty, opt_llretslot) {
799             if type_of::return_uses_outptr(ccx, ret_ty) {
800                 llargs.push(llretslot);
804         // Push the environment (or a trait object's self).
805         match (llenv, llself) {
806             (Some(llenv), None) => llargs.push(llenv),
807             (None, Some(llself)) => llargs.push(llself),
811         // Push the arguments.
812         bcx = trans_args(bcx,
816                          cleanup::CustomScope(arg_cleanup_scope),
// Arguments are now owned by the callee; drop their arg cleanups.
820         fcx.scopes.borrow_mut().last_mut().unwrap().drop_non_lifetime_clean();
822         // Invoke the actual rust fn and update bcx/llresult.
823         let (llret, b) = base::invoke(bcx,
832         // If the Rust convention for this type is return via
833         // the return value, copy it into llretslot.
834         match (opt_llretslot, ret_ty) {
835             (Some(llretslot), ty::FnConverging(ret_ty)) => {
836                 if !type_of::return_uses_outptr(bcx.ccx(), ret_ty) &&
837                     !type_is_zero_size(bcx.ccx(), ret_ty)
839                     store_ty(bcx, llret, llretslot, ret_ty)
// Non-Rust (foreign) path.
845         // Lang items are the only case where dest is None, and
846         // they are always Rust fns.
847         assert!(dest.is_some());
849         let mut llargs = Vec::new();
850         let arg_tys = match args {
851             ArgExprs(a) => a.iter().map(|x| expr_ty(bcx, &**x)).collect(),
852             _ => panic!("expected arg exprs.")
854         bcx = trans_args(bcx,
858                          cleanup::CustomScope(arg_cleanup_scope),
861         fcx.scopes.borrow_mut().last_mut().unwrap().drop_non_lifetime_clean();
863         bcx = foreign::trans_native_call(bcx, callee_ty,
864                                          llfn, opt_llretslot.unwrap(),
865                                          llargs.as_slice(), arg_tys);
// Arguments handed off (or call completed): retire the temp scope.
868     fcx.pop_and_trans_custom_cleanup_scope(bcx, arg_cleanup_scope);
870     // If the caller doesn't care about the result of this fn call,
871     // drop the temporary slot we made.
872     match (dest, opt_llretslot, ret_ty) {
873         (Some(expr::Ignore), Some(llretslot), ty::FnConverging(ret_ty)) => {
874             // drop the value if it is not being saved.
875             bcx = glue::drop_ty(bcx, llretslot, ret_ty, call_info);
876             call_lifetime_end(bcx, llretslot);
881     if ret_ty == ty::FnDiverging {
885     Result::new(bcx, llresult)
// The different ways a call site can supply its arguments to
// `trans_args`; see each variant's comment.
888 pub enum CallArgs<'a> {
889     // Supply value of arguments as a list of expressions that must be
890     // translated. This is used in the common case of `foo(bar, qux)`.
891     ArgExprs(&'a [P<ast::Expr>]),
893     // Supply value of arguments as a list of LLVM value refs; frequently
894     // used with lang items and so forth, when the argument is an internal
896     ArgVals(&'a [ValueRef]),
898     // For overloaded operators: `(lhs, Vec(rhs, rhs_id))`. `lhs`
899     // is the left-hand-side and `rhs/rhs_id` is the datum/expr-id of
900     // the right-hand-side arguments (if any).
901     ArgOverloadedOp(Datum<Expr>, Vec<(Datum<Expr>, ast::NodeId)>),
903     // Supply value of arguments as a list of expressions that must be
904     // translated, for overloaded call operators.
905     ArgOverloadedCall(Vec<&'a ast::Expr>),
// Argument translation for direct calls to `call`/`call_mut`/`call_once`
// under the "rust-call" ABI: the first expr is the receiver, and the
// second expr is a tuple whose fields are untupled into individual
// LLVM arguments.
908 fn trans_args_under_call_abi<'blk, 'tcx>(
909                              mut bcx: Block<'blk, 'tcx>,
910                              arg_exprs: &[P<ast::Expr>],
912                              llargs: &mut Vec<ValueRef>,
913                              arg_cleanup_scope: cleanup::ScopeId,
915                              -> Block<'blk, 'tcx> {
916     // Translate the `self` argument first.
917     let arg_tys = ty::ty_fn_args(fn_ty);
919     let arg_datum = unpack_datum!(bcx, expr::trans(bcx, &*arg_exprs[0]));
920     llargs.push(unpack_result!(bcx, {
929     // Now untuple the rest of the arguments.
930     let tuple_expr = &arg_exprs[1];
931     let tuple_type = node_id_type(bcx, tuple_expr.id);
933     match ty::get(tuple_type).sty {
934         ty::ty_tup(ref field_types) => {
// The tuple must be an lvalue so fields can be projected out.
935             let tuple_datum = unpack_datum!(bcx,
936                                             expr::trans(bcx, &**tuple_expr));
937             let tuple_lvalue_datum =
939                     tuple_datum.to_lvalue_datum(bcx,
// Project each tuple field and push it as its own argument, with
// a cleanup in case a later argument panics.
942             let repr = adt::represent_type(bcx.ccx(), tuple_type);
943             let repr_ptr = &*repr;
944             for i in range(0, field_types.len()) {
945                 let arg_datum = tuple_lvalue_datum.get_element(
949                         adt::trans_field_ptr(bcx, repr_ptr, srcval, 0, i)
951                 let arg_datum = arg_datum.to_expr_datum();
953                     unpack_datum!(bcx, arg_datum.to_rvalue_datum(bcx, "arg"));
955                     unpack_datum!(bcx, arg_datum.to_appropriate_datum(bcx));
956                 llargs.push(arg_datum.add_clean(bcx.fcx, arg_cleanup_scope));
// Type checking guarantees the second argument is a tuple.
961             bcx.sess().span_bug(tuple_expr.span,
962                                 "argument to `.call()` wasn't a tuple?!")
// Argument translation for overloaded call operators (`foo(...)` where
// `foo` implements Fn/FnMut/FnOnce): self first, then the remaining
// exprs, whose types come from the tuple in the fn signature.
969 fn trans_overloaded_call_args<'blk, 'tcx>(
970                               mut bcx: Block<'blk, 'tcx>,
971                               arg_exprs: Vec<&ast::Expr>,
973                               llargs: &mut Vec<ValueRef>,
974                               arg_cleanup_scope: cleanup::ScopeId,
976                               -> Block<'blk, 'tcx> {
977     // Translate the `self` argument first.
978     let arg_tys = ty::ty_fn_args(fn_ty);
980     let arg_datum = unpack_datum!(bcx, expr::trans(bcx, arg_exprs[0]));
981     llargs.push(unpack_result!(bcx, {
990     // Now untuple the rest of the arguments.
// Unlike trans_args_under_call_abi, only the *type* is a tuple here;
// the arguments themselves are separate expressions.
991     let tuple_type = arg_tys[1];
992     match ty::get(tuple_type).sty {
993         ty::ty_tup(ref field_types) => {
994             for (i, &field_type) in field_types.iter().enumerate() {
996                     unpack_datum!(bcx, expr::trans(bcx, arg_exprs[i + 1]));
997                 llargs.push(unpack_result!(bcx, {
// Type checking guarantees the signature's second arg is a tuple.
1008             bcx.sess().span_bug(arg_exprs[0].span,
1009                                 "argument to `.call()` wasn't a tuple?!")
// Translate a call's arguments into `llargs`, dispatching on the
// `CallArgs` form. Each evaluated argument gets a cleanup in
// `arg_cleanup_scope` so earlier args are dropped if a later one panics.
1016 pub fn trans_args<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
1019                               llargs: &mut Vec<ValueRef> ,
1020                               arg_cleanup_scope: cleanup::ScopeId,
1023                               -> Block<'blk, 'tcx> {
1024     debug!("trans_args(abi={})", abi);
1026     let _icx = push_ctxt("trans_args");
1027     let arg_tys = ty::ty_fn_args(fn_ty);
1028     let variadic = ty::fn_is_variadic(fn_ty);
1032     // First we figure out the caller's view of the types of the arguments.
1033     // This will be needed if this is a generic call, because the callee has
1034     // to cast her view of the arguments to the caller's view.
1036         ArgExprs(arg_exprs) => {
1037             if abi == synabi::RustCall {
1038                 // This is only used for direct calls to the `call`,
1039                 // `call_mut` or `call_once` functions.
1040                 return trans_args_under_call_abi(cx,
// Ordinary expression arguments: translate each in order.
1048             let num_formal_args = arg_tys.len();
1049             for (i, arg_expr) in arg_exprs.iter().enumerate() {
1050                 if i == 0 && ignore_self {
// Past the formal list we are in the variadic tail: take the
// adjusted type of the actual argument instead.
1053                 let arg_ty = if i >= num_formal_args {
1055                     expr_ty_adjusted(cx, &**arg_expr)
1060                 let arg_datum = unpack_datum!(bcx, expr::trans(bcx, &**arg_expr));
1061                 llargs.push(unpack_result!(bcx, {
1062                     trans_arg_datum(bcx, arg_ty, arg_datum,
1068         ArgOverloadedCall(arg_exprs) => {
1069             return trans_overloaded_call_args(cx,
// Overloaded operator: lhs is the receiver, rhs args are autoref'd.
1076         ArgOverloadedOp(lhs, rhs) => {
1079                 llargs.push(unpack_result!(bcx, {
1080                     trans_arg_datum(bcx, arg_tys[0], lhs,
1085                 assert_eq!(arg_tys.len(), 1 + rhs.len());
1086                 for (rhs, rhs_id) in rhs.into_iter() {
1087                     llargs.push(unpack_result!(bcx, {
1088                         trans_arg_datum(bcx, arg_tys[1], rhs,
1090                                         DoAutorefArg(rhs_id))
// ArgVals: values are already LLVM refs; append them verbatim.
1095             llargs.push_all(vs);
// Whether an argument should be passed by reference (auto-ref'd) --
// used for overloaded-operator RHS arguments. NOTE(review): the
// "don't autoref" variant is elided from this excerpt; `trans_arg_datum`
// below matches both cases.
1102 pub enum AutorefArg {
1104     DoAutorefArg(ast::NodeId)
// Convert one evaluated argument datum into the LLVM value that will be
// passed to the callee: either a borrow (autoref) or an owned value
// whose cleanup is transferred to `arg_cleanup_scope`. Also bitcasts the
// value when the caller's and callee's views of the type differ.
1107 pub fn trans_arg_datum<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1108                                    formal_arg_ty: ty::t,
1109                                    arg_datum: Datum<Expr>,
1110                                    arg_cleanup_scope: cleanup::ScopeId,
1111                                    autoref_arg: AutorefArg)
1112                                    -> Result<'blk, 'tcx> {
1113     let _icx = push_ctxt("trans_arg_datum");
1115     let ccx = bcx.ccx();
1117     debug!("trans_arg_datum({})",
1118            formal_arg_ty.repr(bcx.tcx()));
1120     let arg_datum_ty = arg_datum.ty;
1122     debug!("   arg datum: {}", arg_datum.to_string(bcx.ccx()));
1125     // FIXME(#3548) use the adjustments table
1127         DoAutorefArg(arg_id) => {
1128             // We will pass argument by reference
1129             // We want an lvalue, so that we can pass by reference and
1130             let arg_datum = unpack_datum!(
1131                 bcx, arg_datum.to_lvalue_datum(bcx, "arg", arg_id));
1132             val = arg_datum.val;
// By-value path (the non-autoref arm; its match pattern is elided).
1135             // Make this an rvalue, since we are going to be
1136             // passing ownership.
1137             let arg_datum = unpack_datum!(
1138                 bcx, arg_datum.to_rvalue_datum(bcx, "arg"));
1140             // Now that arg_datum is owned, get it into the appropriate
1141             // mode (ref vs value).
1142             let arg_datum = unpack_datum!(
1143                 bcx, arg_datum.to_appropriate_datum(bcx));
1145             // Technically, ownership of val passes to the callee.
1146             // However, we must cleanup should we panic before the
1147             // callee is actually invoked.
1148             val = arg_datum.add_clean(bcx.fcx, arg_cleanup_scope);
// Caller/callee type mismatch (e.g. from subtyping): cast the pointer.
1152     if formal_arg_ty != arg_datum_ty {
1153         // this could happen due to e.g. subtyping
1154         let llformal_arg_ty = type_of::type_of_explicit_arg(ccx, formal_arg_ty);
1155         debug!("casting actual type ({}) to match formal ({})",
1156                bcx.val_to_string(val), bcx.llty_str(llformal_arg_ty));
1157         debug!("Rust types: {}; {}", ty_to_string(bcx.tcx(), arg_datum_ty),
1158                ty_to_string(bcx.tcx(), formal_arg_ty));
1159         val = PointerCast(bcx, val, llformal_arg_ty);
1162     debug!("--- trans_arg_datum passing {}", bcx.val_to_string(val));
1163     Result::new(bcx, val)