1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! # Translation of Expressions
13 //! Public entry points:
15 //! - `trans_into(bcx, expr, dest) -> bcx`: evaluates an expression,
16 //! storing the result into `dest`. This is the preferred form, if you
19 //! - `trans(bcx, expr) -> DatumBlock`: evaluates an expression, yielding
20 //! `Datum` with the result. You can then store the datum, inspect
21 //! the value, etc. This may introduce temporaries if the datum is a
24 //! - `trans_to_lvalue(bcx, expr, "...") -> DatumBlock`: evaluates an
25 //! expression and ensures that the result has a cleanup associated with it,
26 //! creating a temporary stack slot if necessary.
28 //! - `trans_local_var -> Datum`: looks up a local variable or upvar.
30 //! See doc.rs for more comments.
32 #![allow(non_camel_case_types)]
34 pub use self::cast_kind::*;
35 pub use self::Dest::*;
36 use self::lazy_binop_ty::*;
39 use llvm::{mod, ValueRef};
41 use middle::mem_categorization::Typer;
42 use middle::subst::{mod, Substs};
43 use trans::{_match, adt, asm, base, callee, closure, consts, controlflow};
46 use trans::cleanup::{mod, CleanupMethods};
56 use middle::ty::{struct_fields, tup_fields};
57 use middle::ty::{AdjustDerefRef, AdjustReifyFnPointer, AdjustAddEnv, AutoUnsafe};
58 use middle::ty::{AutoPtr};
59 use middle::ty::{mod, Ty};
60 use middle::ty::MethodCall;
61 use util::common::indenter;
62 use util::ppaux::Repr;
63 use trans::machine::{llsize_of, llsize_of_alloc};
64 use trans::type_::Type;
66 use syntax::{ast, ast_util, codemap};
67 use syntax::print::pprust::{expr_to_string};
69 use syntax::parse::token;
71 use std::iter::repeat;
75 // These are passed around by the code generating functions to track the
76 // destination of a computation's value.
// `Dest` records where a computed value should be delivered: `SaveIn(v)`
// means "store into the LLVM location `v`", `Ignore` means "discard".
// NOTE(review): this extract elides lines (embedded line numbers are
// non-contiguous); the `enum Dest` declaration itself and the `match`
// scaffolding of `to_string` are not visible here.
78 #[deriving(Copy, PartialEq)]
// Debug-style printer for a `Dest`, rendering the LLVM value for `SaveIn`.
85 pub fn to_string(&self, ccx: &CrateContext) -> String {
87 SaveIn(v) => format!("SaveIn({})", ccx.tn().val_to_string(v)),
88 Ignore => "Ignore".to_string()
// NOTE(review): extract elides some lines of this function (embedded line
// numbers jump); comments describe only the visible code.
93 /// This function is equivalent to `trans(bcx, expr).store_to_dest(dest)` but it may generate
94 /// better optimized LLVM code.
95 pub fn trans_into<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
98 -> Block<'blk, 'tcx> {
// If the typechecker recorded adjustments for this expression, the fast
// path below would skip them; fall back to `trans`, which applies them,
// then store the resulting datum into `dest`.
101 if bcx.tcx().adjustments.borrow().contains_key(&expr.id) {
102 // use trans, which may be less efficient but
103 // which will perform the adjustments:
104 let datum = unpack_datum!(bcx, trans(bcx, expr));
105 return datum.store_to_dest(bcx, dest, expr.id)
108 debug!("trans_into() expr={}", expr.repr(bcx.tcx()));
// Push a cleanup scope for the expression so temporaries created while
// translating it are dropped when the scope is popped below.
110 let cleanup_debug_loc = debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(),
114 bcx.fcx.push_ast_cleanup_scope(cleanup_debug_loc);
116 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
// Dispatch on the expression's kind: datum-producing expressions are
// translated then stored; DPS expressions write directly into `dest`;
// statement expressions produce no value.
117 let kind = ty::expr_kind(bcx.tcx(), expr);
119 ty::LvalueExpr | ty::RvalueDatumExpr => {
120 trans_unadjusted(bcx, expr).store_to_dest(dest, expr.id)
122 ty::RvalueDpsExpr => {
123 trans_rvalue_dps_unadjusted(bcx, expr, dest)
125 ty::RvalueStmtExpr => {
126 trans_rvalue_stmt_unadjusted(bcx, expr)
130 bcx.fcx.pop_and_trans_ast_cleanup_scope(bcx, expr.id)
133 /// Translates an expression, returning a datum (and new block) encapsulating the result. When
134 /// possible, it is preferred to use `trans_into`, as that may avoid creating a temporary on the
// Translate the expression without adjustments, then apply any recorded
// adjustments, all inside a cleanup scope so temporaries are reclaimed.
136 pub fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
138 -> DatumBlock<'blk, 'tcx, Expr> {
139 debug!("trans(expr={})", bcx.expr_to_string(expr));
144 let cleanup_debug_loc = debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(),
148 fcx.push_ast_cleanup_scope(cleanup_debug_loc);
149 let datum = unpack_datum!(bcx, trans_unadjusted(bcx, expr));
150 let datum = unpack_datum!(bcx, apply_adjustments(bcx, expr, datum));
151 bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, expr.id);
152 return DatumBlock::new(bcx, datum);
// Returns a pointer to the "extra" word (length or vtable slot) of a fat
// pointer, via a GEP at index `abi::FAT_PTR_EXTRA`.
155 pub fn get_len(bcx: Block, fat_ptr: ValueRef) -> ValueRef {
156 GEPi(bcx, fat_ptr, &[0u, abi::FAT_PTR_EXTRA])
// Returns a pointer to the data-address word of a fat pointer, via a GEP
// at index `abi::FAT_PTR_ADDR`.
159 pub fn get_dataptr(bcx: Block, fat_ptr: ValueRef) -> ValueRef {
160 GEPi(bcx, fat_ptr, &[0u, abi::FAT_PTR_ADDR])
// NOTE(review): extract elides some lines of this function (embedded line
// numbers jump); comments describe only the visible code.
163 /// Helper for trans that apply adjustments from `expr` to `datum`, which should be the unadjusted
164 /// translation of `expr`.
165 fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
167 datum: Datum<'tcx, Expr>)
168 -> DatumBlock<'blk, 'tcx, Expr> {
170 let mut datum = datum;
// No adjustment recorded for this expression: return the datum unchanged.
171 let adjustment = match bcx.tcx().adjustments.borrow().get(&expr.id).cloned() {
173 return DatumBlock::new(bcx, datum);
177 debug!("unadjusted datum for expr {}: {}, adjustment={}",
178 expr.repr(bcx.tcx()),
179 datum.to_string(bcx.ccx()),
180 adjustment.repr(bcx.tcx()));
// Closure-environment adjustment: wrap a bare fn pointer into a closure.
182 AdjustAddEnv(def_id, _) => {
183 datum = unpack_datum!(bcx, add_env(bcx, def_id, expr, datum));
185 AdjustReifyFnPointer(_def_id) => {
186 // FIXME(#19925) once fn item types are
187 // zero-sized, we'll need to do something here
// Deref/ref adjustment: compute how many autoderefs to actually perform
// and whether the trailing autoref should be applied.
189 AdjustDerefRef(ref adj) => {
190 let (autoderefs, use_autoref) = match adj.autoref {
191 // Extracting a value from a box counts as a deref, but if we are
192 // just converting Box<[T, ..n]> to Box<[T]> we aren't really doing
193 // a deref (and wouldn't if we could treat Box like a normal struct).
194 Some(ty::AutoUnsizeUniq(..)) => (adj.autoderefs - 1, true),
195 // We are a bit paranoid about adjustments and thus might have a re-
196 // borrow here which merely derefs and then refs again (it might have
197 // a different region or mutability, but we don't care here. It might
198 // also be just in case we need to unsize. But if there are no nested
199 // adjustments then it should be a no-op).
200 Some(ty::AutoPtr(_, _, None)) if adj.autoderefs == 1 => {
202 // Don't skip a conversion from Box<T> to &T, etc.
204 let method_call = MethodCall::autoderef(expr.id, adj.autoderefs-1);
205 let method = bcx.tcx().method_map.borrow().get(&method_call).is_some();
207 // Don't skip an overloaded deref.
208 (adj.autoderefs, true)
210 (adj.autoderefs - 1, false)
213 _ => (adj.autoderefs, true),
216 _ => (adj.autoderefs, true)
// Perform the computed autoderefs on an lvalue version of the datum.
221 let lval = unpack_datum!(bcx, datum.to_lvalue_datum(bcx, "auto_deref", expr.id));
222 datum = unpack_datum!(
223 bcx, deref_multiple(bcx, expr, lval.to_expr_datum(), autoderefs));
226 // (You might think there is a more elegant way to do this than a
227 // use_autoref bool, but then you remember that the borrow checker exists).
228 if let (true, &Some(ref a)) = (use_autoref, &adj.autoref) {
229 datum = unpack_datum!(bcx, apply_autoref(a,
236 debug!("after adjustments, datum={}", datum.to_string(bcx.ccx()));
237 return DatumBlock::new(bcx, datum);
// Applies a single `AutoRef` adjustment (possibly with a nested inner
// autoref, applied first) to `datum`, dispatching to the appropriate
// referencing/unsizing helper.
// NOTE(review): some lines of this function are elided in this extract.
239 fn apply_autoref<'blk, 'tcx>(autoref: &ty::AutoRef<'tcx>,
240 bcx: Block<'blk, 'tcx>,
242 datum: Datum<'tcx, Expr>)
243 -> DatumBlock<'blk, 'tcx, Expr> {
245 let mut datum = datum;
247 let datum = match autoref {
// Taking a reference (safe or unsafe): recurse into any nested autoref
// first, then take the address of the result.
248 &AutoPtr(_, _, ref a) | &AutoUnsafe(_, ref a) => {
251 &Some(box ref a) => {
252 datum = unpack_datum!(bcx, apply_autoref(a, bcx, expr, datum));
256 unpack_datum!(bcx, ref_ptr(bcx, expr, datum))
258 &ty::AutoUnsize(ref k) => {
259 debug!(" AutoUnsize");
260 unpack_datum!(bcx, unsize_expr(bcx, expr, datum, k))
// Box<[T, ..n]> -> Box<[T]> has a dedicated path keyed on the length.
263 &ty::AutoUnsizeUniq(ty::UnsizeLength(len)) => {
264 debug!(" AutoUnsizeUniq(UnsizeLength)");
265 unpack_datum!(bcx, unsize_unique_vec(bcx, expr, datum, len))
267 &ty::AutoUnsizeUniq(ref k) => {
268 debug!(" AutoUnsizeUniq");
269 unpack_datum!(bcx, unsize_unique_expr(bcx, expr, datum, k))
273 DatumBlock::new(bcx, datum)
// Takes the address of `datum`, producing either a fat pointer (for
// unsized types, which carry extra info) or a thin reference.
276 fn ref_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
278 datum: Datum<'tcx, Expr>)
279 -> DatumBlock<'blk, 'tcx, Expr> {
280 debug!("ref_ptr(expr={}, datum={})",
281 expr.repr(bcx.tcx()),
282 datum.to_string(bcx.ccx()));
284 if !type_is_sized(bcx.tcx(), datum.ty) {
285 debug!("Taking address of unsized type {}",
286 bcx.ty_to_string(datum.ty));
287 ref_fat_ptr(bcx, expr, datum)
289 debug!("Taking address of sized type {}",
290 bcx.ty_to_string(datum.ty));
291 auto_ref(bcx, datum, expr)
// NOTE(review): some lines of this function are elided in this extract
// (e.g. the `match kind` opener before line 315 is not visible).
295 // Retrieve the information we are losing (making dynamic) in an unsizing
297 // When making a dtor, we need to do different things depending on the
298 // ownership of the object.. mk_ty is a function for turning `unadjusted_ty`
299 // into a type to be destructed. If we want to end up with a Box pointer,
300 // then mk_ty should make a Box pointer (T -> Box<T>), if we want a
301 // borrowed reference then it should be T -> &T.
// Returns the "extra" word of the resulting fat pointer: a length constant
// for `[T, ..n] -> [T]`, a recursive lookup for struct unsizing, or a
// vtable pointer for trait-object coercions.
302 fn unsized_info<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
303 kind: &ty::UnsizeKind<'tcx>,
305 unadjusted_ty: Ty<'tcx>,
306 mk_ty: F) -> ValueRef where
307 F: FnOnce(Ty<'tcx>) -> Ty<'tcx>,
309 // FIXME(#19596) workaround: `|t| t` causes monomorphization recursion
310 fn identity<T>(t: T) -> T { t }
312 debug!("unsized_info(kind={}, id={}, unadjusted_ty={})",
313 kind, id, unadjusted_ty.repr(bcx.tcx()));
315 &ty::UnsizeLength(len) => C_uint(bcx.ccx(), len),
// Struct unsizing: recurse on the type parameter being unsized.
316 &ty::UnsizeStruct(box ref k, tp_index) => match unadjusted_ty.sty {
317 ty::ty_struct(_, ref substs) => {
318 let ty_substs = substs.types.get_slice(subst::TypeSpace);
319 // The dtor for a field treats it like a value, so mk_ty
320 // should just be the identity function.
321 unsized_info(bcx, k, id, ty_substs[tp_index], identity)
323 _ => bcx.sess().bug(format!("UnsizeStruct with bad sty: {}",
324 bcx.ty_to_string(unadjusted_ty))[])
// Trait-object coercion: build the monomorphized trait ref and fetch the
// vtable for it, cast to the generic vtable pointer type.
326 &ty::UnsizeVtable(ty::TyTrait { ref principal, .. }, _) => {
327 // Note that we preserve binding levels here:
328 let substs = principal.0.substs.with_self_ty(unadjusted_ty).erase_regions();
329 let substs = bcx.tcx().mk_substs(substs);
331 ty::Binder(Rc::new(ty::TraitRef { def_id: principal.def_id(),
333 let trait_ref = bcx.monomorphize(&trait_ref);
334 let box_ty = mk_ty(unadjusted_ty);
336 meth::get_vtable(bcx, box_ty, trait_ref),
337 Type::vtable_ptr(bcx.ccx()))
// Unsizes a (non-Box) expression: builds a fat pointer to the data
// together with the info word computed by `unsized_info`.
// NOTE(review): some lines are elided here (e.g. parts of the `info`
// closure body around lines 355-363).
342 fn unsize_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
344 datum: Datum<'tcx, Expr>,
345 k: &ty::UnsizeKind<'tcx>)
346 -> DatumBlock<'blk, 'tcx, Expr> {
348 let datum_ty = datum.ty;
349 let unsized_ty = ty::unsize_ty(tcx, datum_ty, k, expr.span);
350 debug!("unsized_ty={}", unsized_ty.repr(bcx.tcx()));
// The destination is an "open" (unsized-value) type wrapping unsized_ty.
351 let dest_ty = ty::mk_open(tcx, unsized_ty);
352 debug!("dest_ty={}", unsized_ty.repr(bcx.tcx()));
353 // Closures for extracting and manipulating the data and payload parts of
355 let info = |: bcx, _val| unsized_info(bcx,
360 tcx.mk_region(ty::ReStatic),
363 mutbl: ast::MutImmutable
// Each unsize kind chooses how to derive the data pointer for the fat ptr.
366 ty::UnsizeStruct(..) =>
367 into_fat_ptr(bcx, expr, datum, dest_ty, |bcx, val| {
368 PointerCast(bcx, val, type_of::type_of(bcx.ccx(), unsized_ty).ptr_to())
370 ty::UnsizeLength(..) =>
371 into_fat_ptr(bcx, expr, datum, dest_ty, |bcx, val| {
372 GEPi(bcx, val, &[0u, 0u])
374 ty::UnsizeVtable(..) =>
375 into_fat_ptr(bcx, expr, datum, dest_ty, |_bcx, val| {
376 PointerCast(bcx, val, Type::i8p(bcx.ccx()))
// Re-borrows an already-fat pointer: loads its data and length words and
// repackages them into a new fat pointer of the closed (referenced) type.
381 fn ref_fat_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
383 datum: Datum<'tcx, Expr>)
384 -> DatumBlock<'blk, 'tcx, Expr> {
386 let dest_ty = ty::close_type(tcx, datum.ty);
387 let base = |: bcx, val| Load(bcx, get_dataptr(bcx, val));
388 let len = |: bcx, val| Load(bcx, get_len(bcx, val));
389 into_fat_ptr(bcx, expr, datum, dest_ty, base, len)
// Builds a fat pointer in a fresh scratch slot: `base` computes the data
// pointer and `info` the extra word, both from the lvalue's address.
392 fn into_fat_ptr<'blk, 'tcx, F, G>(bcx: Block<'blk, 'tcx>,
394 datum: Datum<'tcx, Expr>,
398 -> DatumBlock<'blk, 'tcx, Expr> where
399 F: FnOnce(Block<'blk, 'tcx>, ValueRef) -> ValueRef,
400 G: FnOnce(Block<'blk, 'tcx>, ValueRef) -> ValueRef,
// Force the datum into memory so we can take its address.
405 let lval = unpack_datum!(bcx,
406 datum.to_lvalue_datum(bcx, "into_fat_ptr", expr.id));
407 let base = base(bcx, lval.val);
408 let info = info(bcx, lval.val);
// Store the two words of the fat pointer into a scratch rvalue slot.
410 let scratch = rvalue_scratch_datum(bcx, dest_ty, "__fat_ptr");
411 Store(bcx, base, get_dataptr(bcx, scratch.val));
412 Store(bcx, info, get_len(bcx, scratch.val));
414 DatumBlock::new(bcx, scratch.to_expr_datum())
// Converts `Box<[T, ..n]>` into `Box<[T]>`: moves the box into a fat-ptr
// scratch slot and stores the compile-time length `len` as the extra word.
417 fn unsize_unique_vec<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
419 datum: Datum<'tcx, Expr>,
421 -> DatumBlock<'blk, 'tcx, Expr> {
425 let datum_ty = datum.ty;
427 let lval = unpack_datum!(bcx,
428 datum.to_lvalue_datum(bcx, "unsize_unique_vec", expr.id));
430 let ll_len = C_uint(bcx.ccx(), len);
431 let unit_ty = ty::sequence_element_type(tcx, ty::type_content(datum_ty));
432 let vec_ty = ty::mk_uniq(tcx, ty::mk_vec(tcx, unit_ty, None));
433 let scratch = rvalue_scratch_datum(bcx, vec_ty, "__unsize_unique");
// Store the box's pointer into the data slot (cast back to the original
// sized pointer type so the store is well-typed).
435 let base = get_dataptr(bcx, scratch.val);
436 let base = PointerCast(bcx,
438 type_of::type_of(bcx.ccx(), datum_ty).ptr_to());
439 bcx = lval.store_to(bcx, base);
441 Store(bcx, ll_len, get_len(bcx, scratch.val));
442 DatumBlock::new(bcx, scratch.to_expr_datum())
// Converts `Box<T>` into a fat `Box<U>` (e.g. a boxed trait object):
// stores the box pointer and the `unsized_info` extra word into a scratch
// slot, then converts the scratch into an lvalue so cleanup is attached.
// NOTE(review): some lines are elided in this extract (e.g. the ty_uniq
// match arm around line 455).
445 fn unsize_unique_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
447 datum: Datum<'tcx, Expr>,
448 k: &ty::UnsizeKind<'tcx>)
449 -> DatumBlock<'blk, 'tcx, Expr> {
453 let datum_ty = datum.ty;
454 let unboxed_ty = match datum_ty.sty {
456 _ => bcx.sess().bug(format!("Expected ty_uniq, found {}",
457 bcx.ty_to_string(datum_ty))[])
459 let result_ty = ty::mk_uniq(tcx, ty::unsize_ty(tcx, unboxed_ty, k, expr.span));
461 let lval = unpack_datum!(bcx,
462 datum.to_lvalue_datum(bcx, "unsize_unique_expr", expr.id));
464 let scratch = rvalue_scratch_datum(bcx, result_ty, "__uniq_fat_ptr");
465 let llbox_ty = type_of::type_of(bcx.ccx(), datum_ty);
466 let base = PointerCast(bcx, get_dataptr(bcx, scratch.val), llbox_ty.ptr_to());
467 bcx = lval.store_to(bcx, base);
// mk_ty wraps in Box so the vtable's dtor matches Box ownership.
469 let info = unsized_info(bcx, k, expr.id, unboxed_ty, |t| ty::mk_uniq(tcx, t));
470 Store(bcx, info, get_len(bcx, scratch.val));
472 let scratch = unpack_datum!(bcx,
473 scratch.to_expr_datum().to_lvalue_datum(bcx,
474 "fresh_uniq_fat_ptr",
477 DatumBlock::new(bcx, scratch.to_expr_datum())
// Wraps a bare fn pointer into a closure value (fn-ptr + null environment)
// for an `AdjustAddEnv` adjustment.
480 fn add_env<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
483 datum: Datum<'tcx, Expr>)
484 -> DatumBlock<'blk, 'tcx, Expr> {
485 // This is not the most efficient thing possible; since closures
486 // are two words it'd be better if this were compiled in
487 // 'dest' mode, but I can't find a nice way to structure the
488 // code and keep it DRY that accommodates that use case at the
491 let closure_ty = expr_ty_adjusted(bcx, expr);
492 let fn_ptr = datum.to_llscalarish(bcx);
493 closure::make_closure_from_bare_fn(bcx, closure_ty, def_id, fn_ptr)
497 /// Translates an expression in "lvalue" mode -- meaning that it returns a reference to the memory
498 /// that the expr represents.
500 /// If this expression is an rvalue, this implies introducing a temporary. In other words,
501 /// something like `x().f` is translated into roughly the equivalent of
503 /// { tmp = x(); tmp.f }
// Thin wrapper: fully translate (with adjustments), then coerce the datum
// to an lvalue, allocating a cleanup-tracked temporary if needed. `name`
// labels the temporary for debugging.
504 pub fn trans_to_lvalue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
507 -> DatumBlock<'blk, 'tcx, Lvalue> {
509 let datum = unpack_datum!(bcx, trans(bcx, expr));
510 return datum.to_lvalue_datum(bcx, name, expr.id);
// NOTE(review): some lines of this function are elided in this extract.
513 /// A version of `trans` that ignores adjustments. You almost certainly do not want to call this
515 fn trans_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
517 -> DatumBlock<'blk, 'tcx, Expr> {
520 debug!("trans_unadjusted(expr={})", bcx.expr_to_string(expr));
521 let _indenter = indenter();
523 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
// Dispatch on expression kind; each arm yields a DatumBlock.
525 return match ty::expr_kind(bcx.tcx(), expr) {
526 ty::LvalueExpr | ty::RvalueDatumExpr => {
527 let datum = unpack_datum!(bcx, {
528 trans_datum_unadjusted(bcx, expr)
531 DatumBlock {bcx: bcx, datum: datum}
// Statement expressions produce no value: translate for effect, then
// return an undef "nil" datum of the expression's type.
534 ty::RvalueStmtExpr => {
535 bcx = trans_rvalue_stmt_unadjusted(bcx, expr);
536 nil(bcx, expr_ty(bcx, expr))
// DPS expressions want a destination; synthesize a scratch slot (or
// Ignore it entirely for zero-sized results).
539 ty::RvalueDpsExpr => {
540 let ty = expr_ty(bcx, expr);
541 if type_is_zero_size(bcx.ccx(), ty) {
542 bcx = trans_rvalue_dps_unadjusted(bcx, expr, Ignore);
545 let scratch = rvalue_scratch_datum(bcx, ty, "");
546 bcx = trans_rvalue_dps_unadjusted(
547 bcx, expr, SaveIn(scratch.val));
549 // Note: this is not obviously a good idea. It causes
550 // immediate values to be loaded immediately after a
551 // return from a call or other similar expression,
552 // which in turn leads to alloca's having shorter
553 // lifetimes and hence larger stack frames. However,
554 // in turn it can lead to more register pressure.
555 // Still, in practice it seems to increase
556 // performance, since we have fewer problems with
558 let scratch = unpack_datum!(
559 bcx, scratch.to_appropriate_datum(bcx));
561 DatumBlock::new(bcx, scratch.to_expr_datum())
// Helper: an undef immediate rvalue used for value-less expressions.
566 fn nil<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ty: Ty<'tcx>)
567 -> DatumBlock<'blk, 'tcx, Expr> {
568 let llval = C_undef(type_of::type_of(bcx.ccx(), ty));
569 let datum = immediate_rvalue(llval, ty);
570 DatumBlock::new(bcx, datum.to_expr_datum())
// Translates datum-producing expressions (paths, field accesses, indexing,
// literals, unary/binary ops, boxes, casts, address-of), dispatching on
// the AST node.
// NOTE(review): many lines are elided in this extract (embedded numbers
// jump); several match arms are visibly truncated.
574 fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
576 -> DatumBlock<'blk, 'tcx, Expr> {
579 let _icx = push_ctxt("trans_datum_unadjusted");
582 ast::ExprParen(ref e) => {
585 ast::ExprPath(_) => {
586 trans_def(bcx, expr, bcx.def(expr.id))
588 ast::ExprField(ref base, ident) => {
589 trans_rec_field(bcx, &**base, ident.node)
591 ast::ExprTupField(ref base, idx) => {
592 trans_rec_tup_field(bcx, &**base, idx.node)
594 ast::ExprIndex(ref base, ref idx) => {
// Range-as-index is the old slicing syntax: call the overloaded slicing
// method via trans_overloaded_op into a scratch destination.
596 ast::ExprRange(ref start, ref end) => {
597 // Special case for slicing syntax (KILLME).
598 let _icx = push_ctxt("trans_slice");
601 let method_call = MethodCall::expr(expr.id);
602 let method_ty = ccx.tcx()
606 .map(|method| method.ty);
607 let base_datum = unpack_datum!(bcx, trans(bcx, &**base));
609 let mut args = vec![];
610 start.as_ref().map(|e| args.push((unpack_datum!(bcx, trans(bcx, &**e)), e.id)));
611 end.as_ref().map(|e| args.push((unpack_datum!(bcx, trans(bcx, &**e)), e.id)));
613 let result_ty = ty::ty_fn_ret(monomorphize_type(bcx,
614 method_ty.unwrap())).unwrap();
615 let scratch = rvalue_scratch_datum(bcx, result_ty, "trans_slice");
618 trans_overloaded_op(bcx,
623 Some(SaveIn(scratch.val)),
625 DatumBlock::new(bcx, scratch.to_expr_datum())
627 _ => trans_index(bcx, expr, &**base, &**idx, MethodCall::expr(expr.id))
630 ast::ExprBox(_, ref contents) => {
631 // Special case for `Box<T>`
632 let box_ty = expr_ty(bcx, expr);
633 let contents_ty = expr_ty(bcx, &**contents);
636 trans_uniq_expr(bcx, box_ty, &**contents, contents_ty)
638 _ => bcx.sess().span_bug(expr.span,
639 "expected unique box")
643 ast::ExprLit(ref lit) => trans_immediate_lit(bcx, expr, &**lit),
644 ast::ExprBinary(op, ref lhs, ref rhs) => {
645 trans_binary(bcx, expr, op, &**lhs, &**rhs)
647 ast::ExprUnary(op, ref x) => {
648 trans_unary(bcx, expr, op, &**x)
650 ast::ExprAddrOf(_, ref x) => {
// Address-of a vec/repeat literal produces a slice directly, inside its
// own cleanup scope keyed to the inner expression.
652 ast::ExprRepeat(..) | ast::ExprVec(..) => {
653 // Special case for slices.
654 let cleanup_debug_loc =
655 debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(),
659 fcx.push_ast_cleanup_scope(cleanup_debug_loc);
660 let datum = unpack_datum!(
661 bcx, tvec::trans_slice_vec(bcx, expr, &**x));
662 bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, x.id);
663 DatumBlock::new(bcx, datum)
666 trans_addr_of(bcx, expr, &**x)
670 ast::ExprCast(ref val, _) => {
671 // Datum output mode means this is a scalar cast:
672 trans_imm_cast(bcx, &**val, expr.id)
675 bcx.tcx().sess.span_bug(
677 format!("trans_rvalue_datum_unadjusted reached \
678 fall-through case: {}",
// Shared implementation of field access: translate the base to an lvalue,
// resolve the field index with `get_idx`, and return a datum pointing at
// the field (building a fat-pointer scratch for unsized fields).
684 fn trans_field<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
687 -> DatumBlock<'blk, 'tcx, Expr> where
688 F: FnOnce(&'blk ty::ctxt<'tcx>, &[ty::field<'tcx>]) -> uint,
691 let _icx = push_ctxt("trans_rec_field");
693 let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, base, "field"));
694 let bare_ty = ty::unopen_type(base_datum.ty);
695 let repr = adt::represent_type(bcx.ccx(), bare_ty);
696 with_field_tys(bcx.tcx(), bare_ty, None, move |discr, field_tys| {
697 let ix = get_idx(bcx.tcx(), field_tys);
698 let d = base_datum.get_element(
701 |srcval| adt::trans_field_ptr(bcx, &*repr, srcval, discr, ix));
703 if type_is_sized(bcx.tcx(), d.ty) {
704 DatumBlock { datum: d.to_expr_datum(), bcx: bcx }
// Unsized field: carry the base's length/info word alongside the field
// pointer in a fresh fat-pointer scratch slot.
706 let scratch = rvalue_scratch_datum(bcx, ty::mk_open(bcx.tcx(), d.ty), "");
707 Store(bcx, d.val, get_dataptr(bcx, scratch.val));
708 let info = Load(bcx, get_len(bcx, base_datum.val));
709 Store(bcx, info, get_len(bcx, scratch.val));
711 DatumBlock::new(bcx, scratch.to_expr_datum())
718 /// Translates `base.field`.
// Named-field access: resolve the index by field name via
// `ty::field_idx_strict`, then defer to the shared `trans_field` helper.
719 fn trans_rec_field<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
722 -> DatumBlock<'blk, 'tcx, Expr> {
723 trans_field(bcx, base, |tcx, field_tys| ty::field_idx_strict(tcx, field.name, field_tys))
726 /// Translates `base.<idx>`.
// Tuple-field access: the positional index is used directly.
727 fn trans_rec_tup_field<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
730 -> DatumBlock<'blk, 'tcx, Expr> {
731 trans_field(bcx, base, |_, _| idx)
// NOTE(review): several lines are elided in this extract (embedded numbers
// jump, e.g. around 740-746 and 809-816); comments cover visible code only.
734 fn trans_index<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
735 index_expr: &ast::Expr,
738 method_call: MethodCall)
739 -> DatumBlock<'blk, 'tcx, Expr> {
740 //! Translates `base[idx]`.
742 let _icx = push_ctxt("trans_index");
746 // Check for overloaded index.
747 let method_ty = ccx.tcx()
751 .map(|method| method.ty);
752 let elt_datum = match method_ty {
754 let base_datum = unpack_datum!(bcx, trans(bcx, base));
756 // Translate index expression.
757 let ix_datum = unpack_datum!(bcx, trans(bcx, idx));
// The user-defined index() returns a reference type; derive the element
// type by dereferencing it.
759 let ref_ty = ty::ty_fn_ret(monomorphize_type(bcx, method_ty)).unwrap();
760 let elt_ty = match ty::deref(ref_ty, true) {
762 bcx.tcx().sess.span_bug(index_expr.span,
763 "index method didn't return a \
764 dereferenceable type?!")
766 Some(elt_tm) => elt_tm.ty,
769 // Overloaded. Evaluate `trans_overloaded_op`, which will
770 // invoke the user's index() method, which basically yields
771 // a `&T` pointer. We can then proceed down the normal
772 // path (below) to dereference that `&T`.
773 let scratch = rvalue_scratch_datum(bcx, ref_ty, "overloaded_index_elt");
775 trans_overloaded_op(bcx,
779 vec![(ix_datum, idx.id)],
780 Some(SaveIn(scratch.val)),
782 let datum = scratch.to_expr_datum();
783 if type_is_sized(bcx.tcx(), elt_ty) {
784 Datum::new(datum.to_llscalarish(bcx), elt_ty, LvalueExpr)
786 Datum::new(datum.val, ty::mk_open(bcx.tcx(), elt_ty), LvalueExpr)
// Built-in indexing path: base as lvalue, bounds-checked element GEP.
790 let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx,
794 // Translate index expression and cast to a suitable LLVM integer.
795 // Rust is less strict than LLVM in this regard.
796 let ix_datum = unpack_datum!(bcx, trans(bcx, idx));
797 let ix_val = ix_datum.to_llscalarish(bcx);
798 let ix_size = machine::llbitsize_of_real(bcx.ccx(),
800 let int_size = machine::llbitsize_of_real(bcx.ccx(),
// Normalize the index width to the target int: sign/zero-extend smaller
// indices (by signedness of the index type), truncate larger ones.
803 if ix_size < int_size {
804 if ty::type_is_signed(expr_ty(bcx, idx)) {
805 SExt(bcx, ix_val, ccx.int_type())
806 } else { ZExt(bcx, ix_val, ccx.int_type()) }
807 } else if ix_size > int_size {
808 Trunc(bcx, ix_val, ccx.int_type())
816 ty::sequence_element_type(bcx.tcx(),
818 base::maybe_name_value(bcx.ccx(), vt.llunit_size, "unit_sz");
820 let (base, len) = base_datum.get_vec_base_and_len(bcx);
822 debug!("trans_index: base {}", bcx.val_to_string(base));
823 debug!("trans_index: len {}", bcx.val_to_string(len));
// Bounds check: branch to the failure path on ix >= len, with an
// llvm.expect hint that the check normally passes.
825 let bounds_check = ICmp(bcx, llvm::IntUGE, ix_val, len);
826 let expect = ccx.get_intrinsic(&("llvm.expect.i1"));
827 let expected = Call(bcx,
829 &[bounds_check, C_bool(ccx, false)],
831 bcx = with_cond(bcx, expected, |bcx| {
832 controlflow::trans_fail_bounds_check(bcx,
837 let elt = InBoundsGEP(bcx, base, &[ix_val]);
838 let elt = PointerCast(bcx, elt, vt.llunit_ty.ptr_to());
839 Datum::new(elt, vt.unit_ty, LvalueExpr)
843 DatumBlock::new(bcx, elt_datum)
// NOTE(review): some lines are elided in this extract; comments cover
// visible code only.
846 fn trans_def<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
847 ref_expr: &ast::Expr,
849 -> DatumBlock<'blk, 'tcx, Expr> {
850 //! Translates a reference to a path.
852 let _icx = push_ctxt("trans_def_lvalue");
// Fn-like items (fns, methods, tuple-struct/variant constructors) go
// through the fn-reference path.
854 def::DefFn(..) | def::DefStaticMethod(..) | def::DefMethod(..) |
855 def::DefStruct(_) | def::DefVariant(..) => {
856 trans_def_fn_unadjusted(bcx, ref_expr, def)
858 def::DefStatic(did, _) => {
859 // There are two things that may happen here:
860 // 1) If the static item is defined in this crate, it will be
861 // translated using `get_item_val`, and we return a pointer to
863 // 2) If the static item is defined in another crate then we add
864 // (or reuse) a declaration of an external global, and return a
866 let const_ty = expr_ty(bcx, ref_expr);
868 fn get_val<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, did: ast::DefId,
869 const_ty: Ty<'tcx>) -> ValueRef {
870 // For external constants, we don't inline.
871 if did.krate == ast::LOCAL_CRATE {
874 // The LLVM global has the type of its initializer,
875 // which may not be equal to the enum's type for
877 let val = base::get_item_val(bcx.ccx(), did.node);
878 let pty = type_of::type_of(bcx.ccx(), const_ty).ptr_to();
879 PointerCast(bcx, val, pty)
882 base::get_extern_const(bcx.ccx(), did, const_ty)
885 let val = get_val(bcx, did, const_ty);
886 DatumBlock::new(bcx, Datum::new(val, const_ty, LvalueExpr))
888 def::DefConst(did) => {
889 // First, inline any external constants into the local crate so we
890 // can be sure to get the LLVM value corresponding to it.
891 let did = inline::maybe_instantiate_inline(bcx.ccx(), did);
892 if did.krate != ast::LOCAL_CRATE {
893 bcx.tcx().sess.span_bug(ref_expr.span,
894 "cross crate constant could not \
897 let val = base::get_item_val(bcx.ccx(), did.node);
899 // Next, we need to crate a ByRef rvalue datum to return. We can't
900 // use the normal .to_ref_datum() function because the type of
901 // `val` is not actually the same as `const_ty`.
903 // To get around this, we make a custom alloca slot with the
904 // appropriate type (const_ty), and then we cast it to a pointer of
905 // typeof(val), store the value, and then hand this slot over to
906 // the datum infrastructure.
907 let const_ty = expr_ty(bcx, ref_expr);
908 let llty = type_of::type_of(bcx.ccx(), const_ty);
909 let slot = alloca(bcx, llty, "const");
910 let pty = Type::from_ref(unsafe { llvm::LLVMTypeOf(val) }).ptr_to();
911 Store(bcx, val, PointerCast(bcx, slot, pty));
913 let datum = Datum::new(slot, const_ty, Rvalue::new(ByRef));
914 DatumBlock::new(bcx, datum.to_expr_datum())
// Anything else resolves as a local variable or upvar.
917 DatumBlock::new(bcx, trans_local_var(bcx, def).to_expr_datum())
// Translates statement-like expressions (break/continue/return, loops,
// assignment, inline asm) for effect only, returning the resulting block.
// NOTE(review): some lines are elided in this extract.
922 fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
924 -> Block<'blk, 'tcx> {
926 let _icx = push_ctxt("trans_rvalue_stmt");
// Nothing to do if this block is already unreachable.
928 if bcx.unreachable.get() {
932 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
935 ast::ExprParen(ref e) => {
936 trans_into(bcx, &**e, Ignore)
938 ast::ExprBreak(label_opt) => {
939 controlflow::trans_break(bcx, expr.id, label_opt)
941 ast::ExprAgain(label_opt) => {
942 controlflow::trans_cont(bcx, expr.id, label_opt)
944 ast::ExprRet(ref ex) => {
945 // Check to see if the return expression itself is reachable.
946 // This can occur when the inner expression contains a return
947 let reachable = if let Some(ref cfg) = bcx.fcx.cfg {
948 cfg.node_is_reachable(expr.id)
954 controlflow::trans_ret(bcx, ex.as_ref().map(|e| &**e))
956 // If it's not reachable, just translate the inner expression
957 // directly. This avoids having to manage a return slot when
958 // it won't actually be used anyway.
959 if let &Some(ref x) = ex {
960 bcx = trans_into(bcx, &**x, Ignore);
962 // Mark the end of the block as unreachable. Once we get to
963 // a return expression, there's no more we should be doing
969 ast::ExprWhile(ref cond, ref body, _) => {
970 controlflow::trans_while(bcx, expr.id, &**cond, &**body)
972 ast::ExprForLoop(ref pat, ref head, ref body, _) => {
973 controlflow::trans_for(bcx,
979 ast::ExprLoop(ref body, _) => {
980 controlflow::trans_loop(bcx, expr.id, &**body)
982 ast::ExprAssign(ref dst, ref src) => {
983 let src_datum = unpack_datum!(bcx, trans(bcx, &**src));
984 let dst_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, &**dst, "assign"));
986 if type_needs_drop(bcx.tcx(), dst_datum.ty) {
987 // If there are destructors involved, make sure we
988 // are copying from an rvalue, since that cannot possible
989 // alias an lvalue. We are concerned about code like:
997 // where e.g. a : Option<Foo> and a.b :
998 // Option<Foo>. In that case, freeing `a` before the
999 // assignment may also free `a.b`!
1001 // We could avoid this intermediary with some analysis
1002 // to determine whether `dst` may possibly own `src`.
1003 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
1004 let src_datum = unpack_datum!(
1005 bcx, src_datum.to_rvalue_datum(bcx, "ExprAssign"));
// Drop the old value in `dst` before storing the new one.
1006 bcx = glue::drop_ty(bcx,
1009 Some(NodeInfo { id: expr.id, span: expr.span }));
1010 src_datum.store_to(bcx, dst_datum.val)
1012 src_datum.store_to(bcx, dst_datum.val)
1015 ast::ExprAssignOp(op, ref dst, ref src) => {
1016 trans_assign_op(bcx, expr, op, &**dst, &**src)
1018 ast::ExprInlineAsm(ref a) => {
1019 asm::trans_inline_asm(bcx, a)
1022 bcx.tcx().sess.span_bug(
1024 format!("trans_rvalue_stmt_unadjusted reached \
1025 fall-through case: {}",
1031 fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1034 -> Block<'blk, 'tcx> {
1035 let _icx = push_ctxt("trans_rvalue_dps_unadjusted");
1037 let tcx = bcx.tcx();
1039 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
1042 ast::ExprParen(ref e) => {
1043 trans_into(bcx, &**e, dest)
1045 ast::ExprPath(_) => {
1046 trans_def_dps_unadjusted(bcx, expr, bcx.def(expr.id), dest)
1048 ast::ExprIf(ref cond, ref thn, ref els) => {
1049 controlflow::trans_if(bcx, expr.id, &**cond, &**thn, els.as_ref().map(|e| &**e), dest)
1051 ast::ExprMatch(ref discr, ref arms, _) => {
1052 _match::trans_match(bcx, expr, &**discr, arms[], dest)
1054 ast::ExprBlock(ref blk) => {
1055 controlflow::trans_block(bcx, &**blk, dest)
1057 ast::ExprStruct(_, ref fields, ref base) => {
1060 base.as_ref().map(|e| &**e),
1063 node_id_type(bcx, expr.id),
1066 ast::ExprRange(ref start, ref end) => {
1067 // FIXME it is just not right that we are synthesising ast nodes in
1069 fn make_field(field_name: &str, expr: P<ast::Expr>) -> ast::Field {
1071 ident: codemap::dummy_spanned(token::str_to_ident(field_name)),
1073 span: codemap::DUMMY_SP,
1077 // A range just desugars into a struct.
1078 // Note that the type of the start and end may not be the same, but
1079 // they should only differ in their lifetime, which should not matter
1081 let (did, fields, ty_params) = match (start, end) {
1082 (&Some(ref start), &Some(ref end)) => {
1084 let fields = vec![make_field("start", start.clone()),
1085 make_field("end", end.clone())];
1086 (tcx.lang_items.range_struct(), fields, vec![node_id_type(bcx, start.id)])
1088 (&Some(ref start), &None) => {
1089 // Desugar to RangeFrom
1090 let fields = vec![make_field("start", start.clone())];
1091 (tcx.lang_items.range_from_struct(), fields, vec![node_id_type(bcx, start.id)])
1093 (&None, &Some(ref end)) => {
1094 // Desugar to RangeTo
1095 let fields = vec![make_field("end", end.clone())];
1096 (tcx.lang_items.range_to_struct(), fields, vec![node_id_type(bcx, end.id)])
1099 // Desugar to FullRange
1100 (tcx.lang_items.full_range_struct(), vec![], vec![])
1104 if let Some(did) = did {
1105 let substs = Substs::new_type(ty_params, vec![]);
1111 ty::mk_struct(tcx, did, tcx.mk_substs(substs)),
1114 tcx.sess.span_bug(expr.span,
1115 "No lang item for ranges (how did we get this far?)")
1118 ast::ExprTup(ref args) => {
1119 let numbered_fields: Vec<(uint, &ast::Expr)> =
1120 args.iter().enumerate().map(|(i, arg)| (i, &**arg)).collect();
1127 Some(NodeInfo { id: expr.id, span: expr.span }))
1129 ast::ExprLit(ref lit) => {
1131 ast::LitStr(ref s, _) => {
1132 tvec::trans_lit_str(bcx, expr, (*s).clone(), dest)
1137 .span_bug(expr.span,
1138 "trans_rvalue_dps_unadjusted shouldn't be \
1139 translating this type of literal")
1143 ast::ExprVec(..) | ast::ExprRepeat(..) => {
1144 tvec::trans_fixed_vstore(bcx, expr, dest)
1146 ast::ExprClosure(_, _, ref decl, ref body) => {
1147 // Check the side-table to see whether this is an unboxed
1148 // closure or an older, legacy style closure. Store this
1149 // into a variable to ensure the the RefCell-lock is
1150 // released before we recurse.
1151 let is_unboxed_closure =
1152 bcx.tcx().unboxed_closures.borrow().contains_key(&ast_util::local_def(expr.id));
1153 if is_unboxed_closure {
1154 closure::trans_unboxed_closure(bcx, &**decl, &**body, expr.id, dest)
1156 let expr_ty = expr_ty(bcx, expr);
1157 let store = ty::ty_closure_store(expr_ty);
1158 debug!("translating block function {} with type {}",
1159 expr_to_string(expr), expr_ty.repr(tcx));
1160 closure::trans_expr_fn(bcx, store, &**decl, &**body, expr.id, dest)
1163 ast::ExprCall(ref f, ref args) => {
1164 if bcx.tcx().is_method_call(expr.id) {
1165 trans_overloaded_call(bcx,
1171 callee::trans_call(bcx,
1174 callee::ArgExprs(args[]),
1178 ast::ExprMethodCall(_, _, ref args) => {
1179 callee::trans_method_call(bcx,
1182 callee::ArgExprs(args[]),
1185 ast::ExprBinary(op, ref lhs, ref rhs) => {
1186 // if not overloaded, would be RvalueDatumExpr
1187 let lhs = unpack_datum!(bcx, trans(bcx, &**lhs));
1188 let rhs_datum = unpack_datum!(bcx, trans(bcx, &**rhs));
1189 trans_overloaded_op(bcx, expr, MethodCall::expr(expr.id), lhs,
1190 vec![(rhs_datum, rhs.id)], Some(dest),
1191 !ast_util::is_by_value_binop(op)).bcx
1193 ast::ExprUnary(op, ref subexpr) => {
1194 // if not overloaded, would be RvalueDatumExpr
1195 let arg = unpack_datum!(bcx, trans(bcx, &**subexpr));
1196 trans_overloaded_op(bcx, expr, MethodCall::expr(expr.id),
1197 arg, Vec::new(), Some(dest), !ast_util::is_by_value_unop(op)).bcx
1199 ast::ExprIndex(ref base, ref idx) => {
1200 // if not overloaded, would be RvalueDatumExpr
1201 let base = unpack_datum!(bcx, trans(bcx, &**base));
1202 let idx_datum = unpack_datum!(bcx, trans(bcx, &**idx));
1203 trans_overloaded_op(bcx, expr, MethodCall::expr(expr.id), base,
1204 vec![(idx_datum, idx.id)], Some(dest), true).bcx
1206 ast::ExprCast(ref val, _) => {
1207 // DPS output mode means this is a trait cast:
1208 if ty::type_is_trait(node_id_type(bcx, expr.id)) {
1210 bcx.tcx().object_cast_map.borrow()
1212 .map(|t| (*t).clone())
1214 let trait_ref = bcx.monomorphize(&trait_ref);
1215 let datum = unpack_datum!(bcx, trans(bcx, &**val));
1216 meth::trans_trait_cast(bcx, datum, expr.id,
1219 bcx.tcx().sess.span_bug(expr.span,
1220 "expr_cast of non-trait");
1223 ast::ExprAssignOp(op, ref dst, ref src) => {
1224 trans_assign_op(bcx, expr, op, &**dst, &**src)
1227 bcx.tcx().sess.span_bug(
1229 format!("trans_rvalue_dps_unadjusted reached fall-through \
/// Translates a path expression that resolves to a definition (e.g. a
/// nullary enum-variant constructor or a unit/struct constant) in DPS
/// style, writing the result into `dest`.
///
/// NOTE(review): interior lines are elided in this view; only the
/// variant/struct arms and the span_bug fall-through are visible.
1236 fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1237 ref_expr: &ast::Expr,
1240 -> Block<'blk, 'tcx> {
1241 let _icx = push_ctxt("trans_def_dps_unadjusted");
1243 let lldest = match dest {
1244 SaveIn(lldest) => lldest,
// Caller doesn't want the value: nothing to write, return early.
1245 Ignore => { return bcx; }
1249 def::DefVariant(tid, vid, _) => {
1250 let variant_info = ty::enum_variant_with_id(bcx.tcx(), tid, vid);
// A variant *with* arguments is a constructor fn: store the fn pointer.
1251 if variant_info.args.len() > 0u {
1253 let llfn = callee::trans_fn_ref(bcx, vid, ExprId(ref_expr.id));
1254 Store(bcx, llfn, lldest);
// Nullary variant: just write the discriminant into the destination.
1258 let ty = expr_ty(bcx, ref_expr);
1259 let repr = adt::represent_type(bcx.ccx(), ty);
1260 adt::trans_set_discr(bcx, &*repr, lldest,
1261 variant_info.disr_val);
1265 def::DefStruct(_) => {
1266 let ty = expr_ty(bcx, ref_expr);
// Structs with a dtor carry a drop flag; initialize it via the discr.
1268 ty::ty_struct(did, _) if ty::has_dtor(bcx.tcx(), did) => {
1269 let repr = adt::represent_type(bcx.ccx(), ty);
1270 adt::trans_set_discr(bcx, &*repr, lldest, 0);
// Any other def kind reaching here is a compiler bug.
1277 bcx.tcx().sess.span_bug(ref_expr.span, format!(
1278 "Non-DPS def {} referened by {}",
1279 def, bcx.node_id_to_string(ref_expr.id))[]);
/// Translates a path that refers to a callable item (fn, tuple-struct or
/// variant constructor, method) into a by-value fn-pointer datum.
1284 fn trans_def_fn_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1285 ref_expr: &ast::Expr,
1287 -> DatumBlock<'blk, 'tcx, Expr> {
1288 let _icx = push_ctxt("trans_def_datum_unadjusted");
1290 let llfn = match def {
// Plain fns, constructors, and impl methods: direct fn reference.
1291 def::DefFn(did, _) |
1292 def::DefStruct(did) | def::DefVariant(_, did, _) |
1293 def::DefStaticMethod(did, def::FromImpl(_)) |
1294 def::DefMethod(did, _, def::FromImpl(_)) => {
1295 callee::trans_fn_ref(bcx, did, ExprId(ref_expr.id))
// Trait methods need trait-dispatch resolution to the impl's fn.
1297 def::DefStaticMethod(impl_did, def::FromTrait(trait_did)) |
1298 def::DefMethod(impl_did, _, def::FromTrait(trait_did)) => {
1299 meth::trans_static_method_callee(bcx, impl_did,
1300 trait_did, ref_expr.id)
// Non-callable defs reaching here indicate a compiler bug.
1303 bcx.tcx().sess.span_bug(ref_expr.span, format!(
1304 "trans_def_fn_unadjusted invoked on: {} for {}",
1306 ref_expr.repr(bcx.tcx()))[]);
// Wrap the fn pointer as an immediate rvalue of the expression's type.
1310 let fn_ty = expr_ty(bcx, ref_expr);
1311 DatumBlock::new(bcx, Datum::new(llfn, fn_ty, RvalueExpr(Rvalue::new(ByValue))))
1314 /// Translates a reference to a local variable or argument. This always results in an lvalue datum.
///
/// Looks up the LLVM value for the variable in the function context's
/// side tables (`llupvars` for captured variables, `lllocals` for
/// locals/args) and panics via `bug`/`unimpl` if it is missing.
1315 pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1317 -> Datum<'tcx, Lvalue> {
1318 let _icx = push_ctxt("trans_local_var");
1321 def::DefUpvar(nid, _, _) => {
1322 // Can't move upvars, so this is never a ZeroMemLastUse.
1323 let local_ty = node_id_type(bcx, nid);
1324 match bcx.fcx.llupvars.borrow().get(&nid) {
1325 Some(&val) => Datum::new(val, local_ty, Lvalue),
// Missing table entry: internal compiler error.
1327 bcx.sess().bug(format!(
1328 "trans_local_var: no llval for upvar {} found",
1333 def::DefLocal(nid) => {
1334 let datum = match bcx.fcx.lllocals.borrow().get(&nid) {
1337 bcx.sess().bug(format!(
1338 "trans_local_var: no datum for local/arg {} found",
1342 debug!("take_local(nid={}, v={}, ty={})",
1343 nid, bcx.val_to_string(datum.val), bcx.ty_to_string(datum.ty));
// Any other def kind (e.g. statics) is not handled by this path.
1347 bcx.sess().unimpl(format!(
1348 "unsupported def type in trans_local_var: {}",
1354 /// Helper for enumerating the field types of structs, enums, or records. The optional node ID here
1355 /// is the node ID of the path identifying the enum variant in use. If none, this cannot possibly
1356 /// be an enum variant (so, if it is and `node_id_opt` is none, this function panics).
///
/// Invokes `op` with the discriminant value and the field slice for the
/// struct / tuple / enum-variant case of `ty`.
1357 pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>,
1359 node_id_opt: Option<ast::NodeId>,
1362 F: FnOnce(ty::Disr, &[ty::field<'tcx>]) -> R,
// Structs and tuples have a single "variant" with discriminant 0.
1365 ty::ty_struct(did, substs) => {
1366 op(0, struct_fields(tcx, did, substs)[])
1369 ty::ty_tup(ref v) => {
1370 op(0, tup_fields(v[])[])
1373 ty::ty_enum(_, substs) => {
1374 // We want the *variant* ID here, not the enum ID.
// Without a node id we cannot know which variant: compiler bug.
1377 tcx.sess.bug(format!(
1378 "cannot get field types from the enum type {} \
// Resolve the path node to the specific variant and use its discr.
1383 let def = tcx.def_map.borrow()[node_id].clone();
1385 def::DefVariant(enum_id, variant_id, _) => {
1386 let variant_info = ty::enum_variant_with_id(
1387 tcx, enum_id, variant_id);
1388 op(variant_info.disr_val,
1394 tcx.sess.bug("resolve didn't map this expr to a \
1403 tcx.sess.bug(format!(
1404 "cannot get field types from the type {}",
/// Translates a struct literal (`Foo { a: x, ..base }`) into `dest`.
///
/// Matches each named field to its position in the type, then delegates
/// the actual initialization (including the functional-update `base`)
/// to `trans_adt`.
1410 fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1411 fields: &[ast::Field],
1412 base: Option<&ast::Expr>,
1413 expr_span: codemap::Span,
1414 expr_id: ast::NodeId,
1416 dest: Dest) -> Block<'blk, 'tcx> {
1417 let _icx = push_ctxt("trans_rec");
1419 let tcx = bcx.tcx();
1420 with_field_tys(tcx, ty, Some(expr_id), |discr, field_tys| {
// need_base[i] == true until field i is explicitly initialized.
1421 let mut need_base: Vec<_> = repeat(true).take(field_tys.len()).collect();
// Map each AST field name to its index in the type's field list.
1423 let numbered_fields = fields.iter().map(|field| {
1425 field_tys.iter().position(|field_ty|
1426 field_ty.name == field.ident.node.name);
1429 need_base[i] = false;
// Type checking should guarantee every named field exists.
1433 tcx.sess.span_bug(field.span,
1434 "Couldn't find field in struct type")
1437 }).collect::<Vec<_>>();
1438 let optbase = match base {
1439 Some(base_expr) => {
// Fields not explicitly given are copied from the base expression.
1440 let mut leftovers = Vec::new();
1441 for (i, b) in need_base.iter().enumerate() {
1443 leftovers.push((i, field_tys[i].mt.ty))
1446 Some(StructBaseInfo {expr: base_expr,
1447 fields: leftovers })
// No base: every field must have been provided explicitly.
1450 if need_base.iter().any(|b| *b) {
1451 tcx.sess.span_bug(expr_span, "missing fields and no base expr")
1463 Some(NodeInfo { id: expr_id, span: expr_span }))
1467 /// Information that `trans_adt` needs in order to fill in the fields
1468 /// of a struct copied from a base struct (e.g., from an expression
1469 /// like `Foo { a: b, ..base }`.
1471 /// Note that `fields` may be empty; the base expression must always be
1472 /// evaluated for side-effects.
1473 pub struct StructBaseInfo<'a, 'tcx> {
1474 /// The base expression; will be evaluated after all explicit fields.
1475 expr: &'a ast::Expr,
1476 /// The indices of fields to copy paired with their types.
// (Indices refer to positions in the struct's field list, as produced
// by `with_field_tys` in `trans_struct`.)
1477 fields: Vec<(uint, Ty<'tcx>)>
1480 /// Constructs an ADT instance:
1482 /// - `fields` should be a list of field indices paired with the
1483 /// expression to store into that field. The initializers will be
1484 /// evaluated in the order specified by `fields`.
1486 /// - `optbase` contains information on the base struct (if any) from
1487 /// which remaining fields are copied; see comments on `StructBaseInfo`.
///
/// A custom cleanup scope guards partially-initialized fields so they
/// are dropped if a panic occurs mid-construction; SIMD types take a
/// register-based fast path with no cleanup.
1488 pub fn trans_adt<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
1491 fields: &[(uint, &ast::Expr)],
1492 optbase: Option<StructBaseInfo<'a, 'tcx>>,
1494 source_location: Option<NodeInfo>)
1495 -> Block<'blk, 'tcx> {
1496 let _icx = push_ctxt("trans_adt");
1498 let repr = adt::represent_type(bcx.ccx(), ty);
// Emit debug location before any generated code, if we have one.
1500 match source_location {
1501 Some(src_loc) => debuginfo::set_source_location(bcx.fcx,
1507 // If we don't care about the result, just make a
1508 // temporary stack slot
1509 let addr = match dest {
1511 Ignore => alloc_ty(bcx, ty, "temp"),
1514 // This scope holds intermediates that must be cleaned should
1515 // panic occur before the ADT as a whole is ready.
1516 let custom_cleanup_scope = fcx.push_custom_cleanup_scope();
1518 // First we trans the base, if we have one, to the dest
1519 for base in optbase.iter() {
// Functional update is only valid for structs (discriminant 0).
1520 assert_eq!(discr, 0);
1522 match ty::expr_kind(bcx.tcx(), &*base.expr) {
// Droppable-free rvalue base: translate it straight into the dest.
1523 ty::RvalueDpsExpr | ty::RvalueDatumExpr if !type_needs_drop(bcx.tcx(), ty) => {
1524 bcx = trans_into(bcx, &*base.expr, SaveIn(addr));
1526 ty::RvalueStmtExpr => bcx.tcx().sess.bug("unexpected expr kind for struct base expr"),
// Otherwise copy the leftover fields one-by-one from an lvalue base.
1528 let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, &*base.expr, "base"));
1529 for &(i, t) in base.fields.iter() {
1530 let datum = base_datum.get_element(
1531 bcx, t, |srcval| adt::trans_field_ptr(bcx, &*repr, srcval, discr, i));
1532 assert!(type_is_sized(bcx.tcx(), datum.ty));
1533 let dest = adt::trans_field_ptr(bcx, &*repr, addr, discr, i);
1534 bcx = datum.store_to(bcx, dest);
// Re-emit the source location: base translation may have moved it.
1540 match source_location {
1541 Some(src_loc) => debuginfo::set_source_location(bcx.fcx,
1547 if ty::type_is_simd(bcx.tcx(), ty) {
1548 // This is the constructor of a SIMD type, such types are
1549 // always primitive machine types and so do not have a
1550 // destructor or require any clean-up.
1551 let llty = type_of::type_of(bcx.ccx(), ty);
1553 // keep a vector as a register, and running through the field
1554 // `insertelement`ing them directly into that register
1555 // (i.e. avoid GEPi and `store`s to an alloca) .
1556 let mut vec_val = C_undef(llty);
1558 for &(i, ref e) in fields.iter() {
1559 let block_datum = trans(bcx, &**e);
1560 bcx = block_datum.bcx;
1561 let position = C_uint(bcx.ccx(), i);
1562 let value = block_datum.datum.to_llscalarish(bcx);
1563 vec_val = InsertElement(bcx, vec_val, value, position);
1565 Store(bcx, vec_val, addr);
1567 // Now, we just overwrite the fields we've explicitly specified
1568 for &(i, ref e) in fields.iter() {
1569 let dest = adt::trans_field_ptr(bcx, &*repr, addr, discr, i);
1570 let e_ty = expr_ty_adjusted(bcx, &**e);
1571 bcx = trans_into(bcx, &**e, SaveIn(dest));
// Schedule cleanup for this field until the whole ADT is complete.
1572 let scope = cleanup::CustomScope(custom_cleanup_scope);
1573 fcx.schedule_lifetime_end(scope, dest);
1574 fcx.schedule_drop_mem(scope, dest, e_ty);
// All fields are in place: write the discriminant and retire the
// per-field cleanups (the value is now whole).
1578 adt::trans_set_discr(bcx, &*repr, addr, discr);
1580 fcx.pop_custom_cleanup_scope(custom_cleanup_scope);
1582 // If we don't care about the result drop the temporary we made
1586 bcx = glue::drop_ty(bcx, addr, ty, source_location);
1587 base::call_lifetime_end(bcx, addr);
/// Translates a scalar literal (int, float, bool, char — anything but a
/// string, which goes through the DPS path) into an immediate rvalue.
1594 fn trans_immediate_lit<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1597 -> DatumBlock<'blk, 'tcx, Expr> {
1598 // must not be a string constant, that is a RvalueDpsExpr
1599 let _icx = push_ctxt("trans_immediate_lit");
1600 let ty = expr_ty(bcx, expr);
1601 let v = consts::const_lit(bcx.ccx(), expr, lit);
1602 immediate_rvalue_bcx(bcx, v, ty).to_expr_datumblock()
/// Translates a unary operator expression (`!`, `-`, `box`, `*`) that
/// yields a datum (i.e. is not an overloaded operator, except deref).
1605 fn trans_unary<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1608 sub_expr: &ast::Expr)
1609 -> DatumBlock<'blk, 'tcx, Expr> {
1610 let ccx = bcx.ccx();
1612 let _icx = push_ctxt("trans_unary_datum");
1614 let method_call = MethodCall::expr(expr.id);
1616 // The only overloaded operator that is translated to a datum
1617 // is an overloaded deref, since it always yields a `&T`.
1618 // Otherwise, we should be in the RvalueDpsExpr path.
1620 op == ast::UnDeref ||
1621 !ccx.tcx().method_map.borrow().contains_key(&method_call));
1623 let un_ty = expr_ty(bcx, expr);
// `!` — bitwise/logical not on the scalar value.
1627 let datum = unpack_datum!(bcx, trans(bcx, sub_expr));
1628 let llresult = Not(bcx, datum.to_llscalarish(bcx));
1629 immediate_rvalue_bcx(bcx, llresult, un_ty).to_expr_datumblock()
// `-` — negation; float vs. integer negation chosen by type.
1632 let datum = unpack_datum!(bcx, trans(bcx, sub_expr));
1633 let val = datum.to_llscalarish(bcx);
1635 if ty::type_is_fp(un_ty) {
1641 immediate_rvalue_bcx(bcx, llneg, un_ty).to_expr_datumblock()
// `box` — heap-allocate the sub-expression.
1644 trans_uniq_expr(bcx, un_ty, sub_expr, expr_ty(bcx, sub_expr))
// `*` — dereference once (handles overloaded deref too).
1647 let datum = unpack_datum!(bcx, trans(bcx, sub_expr));
1648 deref_once(bcx, expr, datum, method_call)
/// Translates `box <contents>`: allocates exchange-heap storage for
/// `contents_ty`, translates the contents into it, and returns the
/// resulting owned pointer as an immediate rvalue of `box_ty`.
1653 fn trans_uniq_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1655 contents: &ast::Expr,
1656 contents_ty: Ty<'tcx>)
1657 -> DatumBlock<'blk, 'tcx, Expr> {
1658 let _icx = push_ctxt("trans_uniq_expr");
1660 assert!(type_is_sized(bcx.tcx(), contents_ty));
1661 let llty = type_of::type_of(bcx.ccx(), contents_ty);
1662 let size = llsize_of(bcx.ccx(), llty);
1663 let align = C_uint(bcx.ccx(), type_of::align_of(bcx.ccx(), contents_ty));
1664 let llty_ptr = llty.ptr_to();
1665 let Result { bcx, val } = malloc_raw_dyn(bcx, llty_ptr, box_ty, size, align);
1666 // Unique boxes do not allocate for zero-size types. The standard library
1667 // may assume that `free` is never called on the pointer returned for
1668 // `Box<ZeroSizeType>`.
1669 let bcx = if llsize_of_alloc(bcx.ccx(), llty) == 0 {
// Zero-size: no allocation happened, so no free must be scheduled.
1670 trans_into(bcx, contents, SaveIn(val))
// Non-zero size: free the raw allocation if translating the
// contents panics before the box is fully initialized.
1672 let custom_cleanup_scope = fcx.push_custom_cleanup_scope();
1673 fcx.schedule_free_value(cleanup::CustomScope(custom_cleanup_scope),
1674 val, cleanup::HeapExchange, contents_ty);
1675 let bcx = trans_into(bcx, contents, SaveIn(val));
1676 fcx.pop_custom_cleanup_scope(custom_cleanup_scope);
1679 immediate_rvalue_bcx(bcx, val, box_ty).to_expr_datumblock()
/// Translates `&subexpr`: forces the sub-expression into an lvalue and
/// returns its address. An opened DST value is re-packed into a fat
/// pointer (data pointer + length/extra word).
1682 fn trans_addr_of<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1684 subexpr: &ast::Expr)
1685 -> DatumBlock<'blk, 'tcx, Expr> {
1686 let _icx = push_ctxt("trans_addr_of");
1688 let sub_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, subexpr, "addr_of"));
1689 match sub_datum.ty.sty {
1691 // Opened DST value, close to a fat pointer
1692 debug!("Closing fat pointer {}", bcx.ty_to_string(sub_datum.ty));
1694 let scratch = rvalue_scratch_datum(bcx,
1695 ty::close_type(bcx.tcx(), sub_datum.ty),
// Copy the two words of the fat pointer into the scratch slot.
1697 let base = Load(bcx, get_dataptr(bcx, sub_datum.val));
1698 Store(bcx, base, get_dataptr(bcx, scratch.val));
1700 let len = Load(bcx, get_len(bcx, sub_datum.val));
1701 Store(bcx, len, get_len(bcx, scratch.val));
1703 DatumBlock::new(bcx, scratch.to_expr_datum())
1706 // Sized value, ref to a thin pointer
1707 let ty = expr_ty(bcx, expr);
1708 immediate_rvalue_bcx(bcx, sub_datum.val, ty).to_expr_datumblock()
1713 // Important to get types for both lhs and rhs, because one might be _|_
1714 // and the other not.
/// Translates a non-lazy, non-overloaded binary operator on scalar or
/// SIMD operands, selecting the float/signed/unsigned LLVM instruction
/// variant as appropriate, and returns an immediate rvalue.
1715 fn trans_eager_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1716 binop_expr: &ast::Expr,
1723 -> DatumBlock<'blk, 'tcx, Expr> {
1724 let _icx = push_ctxt("trans_eager_binop");
1726 let tcx = bcx.tcx();
1727 let is_simd = ty::type_is_simd(tcx, lhs_t);
// For SIMD, classify by the element type, not the vector type.
1729 if is_simd { ty::simd_type(tcx, lhs_t) }
1732 let is_float = ty::type_is_fp(intype);
1733 let is_signed = ty::type_is_signed(intype);
// Shift RHS may need a width cast to match the LHS.
1735 let rhs = base::cast_shift_expr_rhs(bcx, op, lhs, rhs);
1738 let val = match op {
1740 if is_float { FAdd(bcx, lhs, rhs) }
1741 else { Add(bcx, lhs, rhs) }
1744 if is_float { FSub(bcx, lhs, rhs) }
1745 else { Sub(bcx, lhs, rhs) }
1748 if is_float { FMul(bcx, lhs, rhs) }
1749 else { Mul(bcx, lhs, rhs) }
1755 // Only zero-check integers; fp /0 is NaN
1756 bcx = base::fail_if_zero_or_overflows(bcx, binop_expr.span,
1757 op, lhs, rhs, rhs_t);
1769 // Only zero-check integers; fp %0 is NaN
1770 bcx = base::fail_if_zero_or_overflows(bcx, binop_expr.span,
1771 op, lhs, rhs, rhs_t);
1779 ast::BiBitOr => Or(bcx, lhs, rhs),
1780 ast::BiBitAnd => And(bcx, lhs, rhs),
1781 ast::BiBitXor => Xor(bcx, lhs, rhs),
1782 ast::BiShl => Shl(bcx, lhs, rhs),
// Right shift: arithmetic for signed ints, logical otherwise.
1786 } else { LShr(bcx, lhs, rhs) }
1788 ast::BiEq | ast::BiNe | ast::BiLt | ast::BiGe | ast::BiLe | ast::BiGt => {
1789 if ty::type_is_scalar(rhs_t) {
1790 unpack_result!(bcx, base::compare_scalar_types(bcx, lhs, rhs, rhs_t, op))
1792 base::compare_simd_types(bcx, lhs, rhs, intype, ty::simd_size(tcx, lhs_t), op)
1794 bcx.tcx().sess.span_bug(binop_expr.span, "comparison operator unsupported for type")
// `&&`/`||` are handled by trans_lazy_binop, never here.
1798 bcx.tcx().sess.span_bug(binop_expr.span, "unexpected binop");
1802 immediate_rvalue_bcx(bcx, val, binop_ty).to_expr_datumblock()
1805 // refinement types would obviate the need for this
// Selects short-circuit semantics for `trans_lazy_binop`: `lazy_and`
// (`&&`) vs `lazy_or` (`||`), as matched at its CondBr site.
1806 enum lazy_binop_ty {
/// Translates a short-circuiting binary operator (`&&` / `||`): the RHS
/// is evaluated in its own block that is only branched to when the LHS
/// does not already determine the result; the two paths merge via a phi.
1811 fn trans_lazy_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1812 binop_expr: &ast::Expr,
1816 -> DatumBlock<'blk, 'tcx, Expr> {
1817 let _icx = push_ctxt("trans_lazy_binop");
1818 let binop_ty = expr_ty(bcx, binop_expr);
1821 let DatumBlock {bcx: past_lhs, datum: lhs} = trans(bcx, a);
1822 let lhs = lhs.to_llscalarish(past_lhs);
// LHS diverged: no need to emit the RHS or the join block.
1824 if past_lhs.unreachable.get() {
1825 return immediate_rvalue_bcx(past_lhs, lhs, binop_ty).to_expr_datumblock();
1828 let join = fcx.new_id_block("join", binop_expr.id);
1829 let before_rhs = fcx.new_id_block("before_rhs", b.id);
// `&&`: evaluate RHS only when LHS is true; `||`: only when false.
1832 lazy_and => CondBr(past_lhs, lhs, before_rhs.llbb, join.llbb),
1833 lazy_or => CondBr(past_lhs, lhs, join.llbb, before_rhs.llbb)
1836 let DatumBlock {bcx: past_rhs, datum: rhs} = trans(before_rhs, b);
1837 let rhs = rhs.to_llscalarish(past_rhs);
// RHS diverged: the join can only be reached via the LHS short-circuit.
1839 if past_rhs.unreachable.get() {
1840 return immediate_rvalue_bcx(join, lhs, binop_ty).to_expr_datumblock();
1843 Br(past_rhs, join.llbb);
// Merge the two incoming boolean values at the join point.
1844 let phi = Phi(join, Type::i1(bcx.ccx()), &[lhs, rhs],
1845 &[past_lhs.llbb, past_rhs.llbb]);
1847 return immediate_rvalue_bcx(join, phi, binop_ty).to_expr_datumblock();
/// Dispatches a (non-overloaded) binary expression: `&&`/`||` go to
/// `trans_lazy_binop`, everything else to `trans_eager_binop`.
1850 fn trans_binary<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1855 -> DatumBlock<'blk, 'tcx, Expr> {
1856 let _icx = push_ctxt("trans_binary");
1857 let ccx = bcx.ccx();
1859 // if overloaded, would be RvalueDpsExpr
1860 assert!(!ccx.tcx().method_map.borrow().contains_key(&MethodCall::expr(expr.id)));
1864 trans_lazy_binop(bcx, expr, lazy_and, lhs, rhs)
1867 trans_lazy_binop(bcx, expr, lazy_or, lhs, rhs)
// Eager path: evaluate both operands to scalars, then combine.
1871 let lhs_datum = unpack_datum!(bcx, trans(bcx, lhs));
1872 let rhs_datum = unpack_datum!(bcx, trans(bcx, rhs));
1873 let binop_ty = expr_ty(bcx, expr);
1875 debug!("trans_binary (expr {}): lhs_datum={}",
1877 lhs_datum.to_string(ccx));
1878 let lhs_ty = lhs_datum.ty;
1879 let lhs = lhs_datum.to_llscalarish(bcx);
1881 debug!("trans_binary (expr {}): rhs_datum={}",
1883 rhs_datum.to_string(ccx));
1884 let rhs_ty = rhs_datum.ty;
1885 let rhs = rhs_datum.to_llscalarish(bcx);
1886 trans_eager_binop(bcx, expr, binop_ty, op,
1887 lhs_ty, lhs, rhs_ty, rhs)
/// Translates an overloaded operator (binary, unary, or index) as a
/// method call with `lhs` as receiver and `rhs` as arguments; `autoref`
/// controls whether arguments are passed by reference.
1892 fn trans_overloaded_op<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1894 method_call: MethodCall,
1895 lhs: Datum<'tcx, Expr>,
1896 rhs: Vec<(Datum<'tcx, Expr>, ast::NodeId)>,
1899 -> Result<'blk, 'tcx> {
// The method's type was recorded by typeck in the method map.
1900 let method_ty = (*bcx.tcx().method_map.borrow())[method_call].ty;
1901 callee::trans_call_inner(bcx,
1902 Some(expr_info(expr)),
1903 monomorphize_type(bcx, method_ty),
1904 |bcx, arg_cleanup_scope| {
1905 meth::trans_method_callee(bcx,
1910 callee::ArgOverloadedOp(lhs, rhs, autoref),
/// Translates an overloaded call (`Fn`/`FnMut`/`FnOnce`-style `f(args)`
/// where `f` is not a bare fn) as a method call; the callee expression
/// itself becomes the first argument.
1914 fn trans_overloaded_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
1916 callee: &'a ast::Expr,
1917 args: &'a [P<ast::Expr>],
1919 -> Block<'blk, 'tcx> {
1920 let method_call = MethodCall::expr(expr.id);
1921 let method_type = (*bcx.tcx()
1923 .borrow())[method_call]
// Prepend the callee so it is passed as the receiver argument.
1925 let mut all_args = vec!(callee);
1926 all_args.extend(args.iter().map(|e| &**e));
1928 callee::trans_call_inner(bcx,
1929 Some(expr_info(expr)),
1930 monomorphize_type(bcx,
1932 |bcx, arg_cleanup_scope| {
1933 meth::trans_method_callee(
1939 callee::ArgOverloadedCall(all_args),
/// Emits an integer-to-integer cast, choosing bitcast (same width),
/// truncation (narrowing), or sign/zero extension (widening, by
/// signedness of the source).
1944 fn int_cast(bcx: Block,
1950 let _icx = push_ctxt("int_cast");
1952 let srcsz = llvm::LLVMGetIntTypeWidth(llsrctype.to_ref());
1953 let dstsz = llvm::LLVMGetIntTypeWidth(lldsttype.to_ref());
1954 return if dstsz == srcsz {
1955 BitCast(bcx, llsrc, lldsttype)
1956 } else if srcsz > dstsz {
1957 TruncOrBitCast(bcx, llsrc, lldsttype)
// Widening: sign-extend signed sources, zero-extend unsigned ones.
1959 SExtOrBitCast(bcx, llsrc, lldsttype)
1961 ZExtOrBitCast(bcx, llsrc, lldsttype)
/// Emits a float-to-float cast: extend when widening, truncate when
/// narrowing (same width falls through to the elided tail).
1966 fn float_cast(bcx: Block,
1971 let _icx = push_ctxt("float_cast");
1972 let srcsz = llsrctype.float_width();
1973 let dstsz = lldsttype.float_width();
1974 return if dstsz > srcsz {
1975 FPExt(bcx, llsrc, lldsttype)
1976 } else if srcsz > dstsz {
1977 FPTrunc(bcx, llsrc, lldsttype)
1981 #[deriving(Copy, PartialEq, Show)]
/// Classification of a type for `as`-cast translation; the pairs of
/// kinds (`cast_integral`, `cast_float`, `cast_pointer`, `cast_enum`,
/// ...) are matched in `trans_imm_cast` below.
1982 pub enum cast_kind {
/// Classifies `t` into a `cast_kind` for `as`-cast translation.
/// `char`, ints, uints, and `bool` are all integral casts; references
/// and raw pointers are pointer casts only when the pointee is sized.
1990 pub fn cast_type_kind<'tcx>(tcx: &ty::ctxt<'tcx>, t: Ty<'tcx>) -> cast_kind {
1992 ty::ty_char => cast_integral,
1993 ty::ty_float(..) => cast_float,
1994 ty::ty_rptr(_, mt) | ty::ty_ptr(mt) => {
// Fat pointers (unsized pointee) are not simple pointer casts.
1995 if type_is_sized(tcx, mt.ty) {
2001 ty::ty_bare_fn(..) => cast_pointer,
2002 ty::ty_int(..) => cast_integral,
2003 ty::ty_uint(..) => cast_integral,
2004 ty::ty_bool => cast_integral,
2005 ty::ty_enum(..) => cast_enum,
/// Returns whether the cast `t_in as t_out` needs no code: both sides
/// are dereferenced (raw-deref allowed) and compared. (Comparison tail
/// elided from this view — presumably pointee-type equality; confirm.)
2010 fn cast_is_noop<'tcx>(t_in: Ty<'tcx>, t_out: Ty<'tcx>) -> bool {
2011 match (ty::deref(t_in, true), ty::deref(t_out, true)) {
2012 (Some(ty::mt{ ty: t_in, .. }), Some(ty::mt{ ty: t_out, .. })) => {
/// Translates an `as` cast to an immediate value, dispatching on the
/// (source kind, destination kind) pair: int<->int, float<->float,
/// int<->float, int<->pointer, pointer->pointer, and enum->int/float
/// (via the discriminant). Unsupported pairs are compiler bugs.
2019 fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2022 -> DatumBlock<'blk, 'tcx, Expr> {
2023 let _icx = push_ctxt("trans_cast");
2025 let ccx = bcx.ccx();
2027 let t_in = expr_ty(bcx, expr);
2028 let t_out = node_id_type(bcx, id);
2029 let k_in = cast_type_kind(bcx.tcx(), t_in);
2030 let k_out = cast_type_kind(bcx.tcx(), t_out);
// Source signedness drives sext-vs-zext and siofp-vs-uitofp below.
2031 let s_in = k_in == cast_integral && ty::type_is_signed(t_in);
2032 let ll_t_in = type_of::arg_type_of(ccx, t_in);
2033 let ll_t_out = type_of::arg_type_of(ccx, t_out);
2035 // Convert the value to be cast into a ValueRef, either by-ref or
2036 // by-value as appropriate given its type:
2037 let mut datum = unpack_datum!(bcx, trans(bcx, expr));
// Fast path: e.g. pointer casts that don't change representation.
2039 if cast_is_noop(datum.ty, t_out) {
2041 return DatumBlock::new(bcx, datum);
2044 let newval = match (k_in, k_out) {
2045 (cast_integral, cast_integral) => {
2046 let llexpr = datum.to_llscalarish(bcx);
2047 int_cast(bcx, ll_t_out, ll_t_in, llexpr, s_in)
2049 (cast_float, cast_float) => {
2050 let llexpr = datum.to_llscalarish(bcx);
2051 float_cast(bcx, ll_t_out, ll_t_in, llexpr)
2053 (cast_integral, cast_float) => {
2054 let llexpr = datum.to_llscalarish(bcx);
2056 SIToFP(bcx, llexpr, ll_t_out)
2057 } else { UIToFP(bcx, llexpr, ll_t_out) }
2059 (cast_float, cast_integral) => {
2060 let llexpr = datum.to_llscalarish(bcx);
2061 if ty::type_is_signed(t_out) {
2062 FPToSI(bcx, llexpr, ll_t_out)
2063 } else { FPToUI(bcx, llexpr, ll_t_out) }
2065 (cast_integral, cast_pointer) => {
2066 let llexpr = datum.to_llscalarish(bcx);
2067 IntToPtr(bcx, llexpr, ll_t_out)
2069 (cast_pointer, cast_integral) => {
2070 let llexpr = datum.to_llscalarish(bcx);
2071 PtrToInt(bcx, llexpr, ll_t_out)
2073 (cast_pointer, cast_pointer) => {
2074 let llexpr = datum.to_llscalarish(bcx);
2075 PointerCast(bcx, llexpr, ll_t_out)
2077 (cast_enum, cast_integral) |
2078 (cast_enum, cast_float) => {
// Read the discriminant (needs an lvalue), then cast it as an int.
2080 let repr = adt::represent_type(ccx, t_in);
2081 let datum = unpack_datum!(
2082 bcx, datum.to_lvalue_datum(bcx, "trans_imm_cast", expr.id));
2083 let llexpr_ptr = datum.to_llref();
2085 adt::trans_get_discr(bcx, &*repr, llexpr_ptr, Some(Type::i64(ccx)));
2087 cast_integral => int_cast(bcx, ll_t_out,
2088 val_ty(lldiscrim_a),
2090 cast_float => SIToFP(bcx, lldiscrim_a, ll_t_out),
2092 ccx.sess().bug(format!("translating unsupported cast: \
2093 {} ({}) -> {} ({})",
2094 t_in.repr(bcx.tcx()),
2096 t_out.repr(bcx.tcx()),
2101 _ => ccx.sess().bug(format!("translating unsupported cast: \
2102 {} ({}) -> {} ({})",
2103 t_in.repr(bcx.tcx()),
2105 t_out.repr(bcx.tcx()),
2108 return immediate_rvalue_bcx(bcx, newval, t_out).to_expr_datumblock();
/// Translates a compound assignment (`dst op= src`): loads the current
/// value of `dst`, applies the eager binop, and stores the result back.
/// Overloaded operator methods are not supported here (asserted).
2111 fn trans_assign_op<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2116 -> Block<'blk, 'tcx> {
2117 let _icx = push_ctxt("trans_assign_op");
2120 debug!("trans_assign_op(expr={})", bcx.expr_to_string(expr));
2122 // User-defined operator methods cannot be used with `+=` etc right now
2123 assert!(!bcx.tcx().method_map.borrow().contains_key(&MethodCall::expr(expr.id)));
2125 // Evaluate LHS (destination), which should be an lvalue
2126 let dst_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, dst, "assign_op"));
// The overwritten value is never dropped, so it must not need drop.
2127 assert!(!type_needs_drop(bcx.tcx(), dst_datum.ty));
2128 let dst_ty = dst_datum.ty;
2129 let dst = load_ty(bcx, dst_datum.val, dst_datum.ty);
2132 let rhs_datum = unpack_datum!(bcx, trans(bcx, &*src));
2133 let rhs_ty = rhs_datum.ty;
2134 let rhs = rhs_datum.to_llscalarish(bcx);
2136 // Perform computation and store the result
2137 let result_datum = unpack_datum!(
2138 bcx, trans_eager_binop(bcx, expr, dst_datum.ty, op,
2139 dst_ty, dst, rhs_ty, rhs));
2140 return result_datum.store_to(bcx, dst_datum.val);
/// Produces a `&T` rvalue pointing at `datum`, forcing it into an
/// lvalue first (which schedules cleanup for temporaries).
2143 fn auto_ref<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2144 datum: Datum<'tcx, Expr>,
2146 -> DatumBlock<'blk, 'tcx, Expr> {
2149 // Ensure cleanup of `datum` if not already scheduled and obtain
2150 // a "by ref" pointer.
2151 let lv_datum = unpack_datum!(bcx, datum.to_lvalue_datum(bcx, "autoref", expr.id));
2153 // Compute final type. Note that we are loose with the region and
2154 // mutability, since those things don't matter in trans.
2155 let referent_ty = lv_datum.ty;
2156 let ptr_ty = ty::mk_imm_rptr(bcx.tcx(), bcx.tcx().mk_region(ty::ReStatic), referent_ty);
2159 let llref = lv_datum.to_llref();
2161 // Construct the resulting datum, using what was the "by ref"
2162 // ValueRef of type `referent_ty` to be the "by value" ValueRef
2163 // of type `&referent_ty`.
2164 DatumBlock::new(bcx, Datum::new(llref, ptr_ty, RvalueExpr(Rvalue::new(ByValue))))
/// Applies `deref_once` `times` times in sequence (for autoderef
/// adjustments), threading each intermediate datum into the next step.
2167 fn deref_multiple<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2169 datum: Datum<'tcx, Expr>,
2171 -> DatumBlock<'blk, 'tcx, Expr> {
2173 let mut datum = datum;
2174 for i in range(0, times) {
// Each autoderef level has its own method-call key for typeck info.
2175 let method_call = MethodCall::autoderef(expr.id, i);
2176 datum = unpack_datum!(bcx, deref_once(bcx, expr, datum, method_call));
2178 DatumBlock { bcx: bcx, datum: datum }
/// Performs one dereference of `datum`. Handles overloaded `Deref`
/// (lowered to a method call yielding `&T`) and then the built-in
/// pointer kinds: `Box<T>`, raw pointers, and references, with separate
/// sized vs. unsized (fat-pointer) paths.
2181 fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2183 datum: Datum<'tcx, Expr>,
2184 method_call: MethodCall)
2185 -> DatumBlock<'blk, 'tcx, Expr> {
2186 let ccx = bcx.ccx();
2188 debug!("deref_once(expr={}, datum={}, method_call={})",
2189 expr.repr(bcx.tcx()),
2190 datum.to_string(ccx),
2195 // Check for overloaded deref.
2196 let method_ty = ccx.tcx().method_map.borrow()
2197 .get(&method_call).map(|method| method.ty);
2198 let datum = match method_ty {
2199 Some(method_ty) => {
2200 // Overloaded. Evaluate `trans_overloaded_op`, which will
2201 // invoke the user's deref() method, which basically
2202 // converts from the `Smaht<T>` pointer that we have into
2203 // a `&T` pointer. We can then proceed down the normal
2204 // path (below) to dereference that `&T`.
2205 let datum = match method_call.adjustment {
2206 // Always perform an AutoPtr when applying an overloaded auto-deref
2207 ty::AutoDeref(_) => unpack_datum!(bcx, auto_ref(bcx, datum, expr)),
// Call `deref()` into a scratch slot and use the returned `&T`.
2211 let ref_ty = ty::ty_fn_ret(monomorphize_type(bcx, method_ty)).unwrap();
2212 let scratch = rvalue_scratch_datum(bcx, ref_ty, "overloaded_deref");
2214 unpack_result!(bcx, trans_overloaded_op(bcx, expr, method_call,
2215 datum, Vec::new(), Some(SaveIn(scratch.val)),
2217 scratch.to_expr_datum()
2220 // Not overloaded. We already have a pointer we know how to deref.
2225 let r = match datum.ty.sty {
2226 ty::ty_uniq(content_ty) => {
2227 if type_is_sized(bcx.tcx(), content_ty) {
2228 deref_owned_pointer(bcx, expr, datum, content_ty)
2230 // A fat pointer and an opened DST value have the same
2231 // representation just different types. Since there is no
2232 // temporary for `*e` here (because it is unsized), we cannot
2233 // emulate the sized object code path for running drop glue and
2234 // free. Instead, we schedule cleanup for `e`, turning it into
2236 let datum = unpack_datum!(
2237 bcx, datum.to_lvalue_datum(bcx, "deref", expr.id));
2239 let datum = Datum::new(datum.val, ty::mk_open(bcx.tcx(), content_ty), LvalueExpr);
2240 DatumBlock::new(bcx, datum)
2244 ty::ty_ptr(ty::mt { ty: content_ty, .. }) |
2245 ty::ty_rptr(_, ty::mt { ty: content_ty, .. }) => {
2246 if type_is_sized(bcx.tcx(), content_ty) {
2247 let ptr = datum.to_llscalarish(bcx);
2249 // Always generate an lvalue datum, even if datum.mode is
2250 // an rvalue. This is because datum.mode is only an
2251 // rvalue for non-owning pointers like &T or *T, in which
2252 // case cleanup *is* scheduled elsewhere, by the true
2253 // owner (or, in the case of *T, by the user).
2254 DatumBlock::new(bcx, Datum::new(ptr, content_ty, LvalueExpr))
2256 // A fat pointer and an opened DST value have the same representation
2257 // just different types.
2258 DatumBlock::new(bcx, Datum::new(datum.val,
2259 ty::mk_open(bcx.tcx(), content_ty),
// Deref of a non-pointer type is a compiler bug by this point.
2265 bcx.tcx().sess.span_bug(
2267 format!("deref invoked on expr of illegal type {}",
2268 datum.ty.repr(bcx.tcx()))[]);
2272 debug!("deref_once(expr={}, method_call={}, result={})",
2273 expr.id, method_call, r.datum.to_string(ccx));
2277 /// We microoptimize derefs of owned pointers a bit here. Basically, the idea is to make the
2278 /// deref of an rvalue result in an rvalue. This helps to avoid intermediate stack slots in the
2279 /// resulting LLVM. The idea here is that, if the `Box<T>` pointer is an rvalue, then we can
2280 /// schedule a *shallow* free of the `Box<T>` pointer, and then return a ByRef rvalue into the
2281 /// pointer. Because the free is shallow, it is legit to return an rvalue, because we know that
2282 /// the contents are not yet scheduled to be freed. The language rules ensure that the contents
2283 /// will be used (or moved) before the free occurs.
2284 fn deref_owned_pointer<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2286 datum: Datum<'tcx, Expr>,
2287 content_ty: Ty<'tcx>)
2288 -> DatumBlock<'blk, 'tcx, Expr> {
2290 RvalueExpr(Rvalue { mode: ByRef }) => {
2291 let scope = cleanup::temporary_scope(bcx.tcx(), expr.id);
2292 let ptr = Load(bcx, datum.val);
2293 if !type_is_zero_size(bcx.ccx(), content_ty) {
2294 bcx.fcx.schedule_free_value(scope, ptr, cleanup::HeapExchange, content_ty);
2297 RvalueExpr(Rvalue { mode: ByValue }) => {
2298 let scope = cleanup::temporary_scope(bcx.tcx(), expr.id);
2299 if !type_is_zero_size(bcx.ccx(), content_ty) {
2300 bcx.fcx.schedule_free_value(scope, datum.val, cleanup::HeapExchange,
2307 // If we had an rvalue in, we produce an rvalue out.
2308 let (llptr, kind) = match datum.kind {
2310 (Load(bcx, datum.val), LvalueExpr)
2312 RvalueExpr(Rvalue { mode: ByRef }) => {
2313 (Load(bcx, datum.val), RvalueExpr(Rvalue::new(ByRef)))
2315 RvalueExpr(Rvalue { mode: ByValue }) => {
2316 (datum.val, RvalueExpr(Rvalue::new(ByRef)))
2320 let datum = Datum { ty: content_ty, val: llptr, kind: kind };
2321 DatumBlock { bcx: bcx, datum: datum }