1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! # Translation of Expressions
13 //! Public entry points:
15 //! - `trans_into(bcx, expr, dest) -> bcx`: evaluates an expression,
16 //! storing the result into `dest`. This is the preferred form, if you
19 //! - `trans(bcx, expr) -> DatumBlock`: evaluates an expression, yielding
20 //! `Datum` with the result. You can then store the datum, inspect
21 //! the value, etc. This may introduce temporaries if the datum is a
24 //! - `trans_to_lvalue(bcx, expr, "...") -> DatumBlock`: evaluates an
25 //! expression and ensures that the result has a cleanup associated with it,
26 //! creating a temporary stack slot if necessary.
28 //! - `trans_local_var -> Datum`: looks up a local variable or upvar.
30 //! See doc.rs for more comments.
32 #![allow(non_camel_case_types)]
34 pub use self::cast_kind::*;
35 pub use self::Dest::*;
36 use self::lazy_binop_ty::*;
39 use llvm::{self, ValueRef};
41 use middle::mem_categorization::Typer;
42 use middle::subst::{self, Substs};
43 use trans::{_match, adt, asm, base, callee, closure, consts, controlflow};
46 use trans::cleanup::{self, CleanupMethods};
56 use middle::ty::{struct_fields, tup_fields};
57 use middle::ty::{AdjustDerefRef, AdjustReifyFnPointer, AutoUnsafe};
58 use middle::ty::{AutoPtr};
59 use middle::ty::{self, Ty};
60 use middle::ty::MethodCall;
61 use util::common::indenter;
62 use util::ppaux::Repr;
63 use trans::machine::{llsize_of, llsize_of_alloc};
64 use trans::type_::Type;
66 use syntax::{ast, ast_util, codemap};
67 use syntax::print::pprust::{expr_to_string};
69 use syntax::parse::token;
71 use std::iter::repeat;
75 // These are passed around by the code generating functions to track the
76 // destination of a computation's value.
// NOTE(review): the `Dest` enum declaration itself is elided from this listing;
// from the match arms below it has (at least) two variants: `SaveIn(ValueRef)`,
// which directs a computation to write its result into the given LLVM value,
// and `Ignore`, which discards the result. TODO confirm against the full file.
78 #[derive(Copy, PartialEq)]
// Render a `Dest` for debug output.
85 pub fn to_string(&self, ccx: &CrateContext) -> String {
87 SaveIn(v) => format!("SaveIn({})", ccx.tn().val_to_string(v)),
88 Ignore => "Ignore".to_string()
93 /// This function is equivalent to `trans(bcx, expr).store_to_dest(dest)` but it may generate
94 /// better optimized LLVM code.
95 pub fn trans_into<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
98 -> Block<'blk, 'tcx> {
// If the expression has recorded adjustments (autoderef/autoref/unsize),
// fall back to the general `trans` path, which applies them.
101 if bcx.tcx().adjustments.borrow().contains_key(&expr.id) {
102 // use trans, which may be less efficient but
103 // which will perform the adjustments:
104 let datum = unpack_datum!(bcx, trans(bcx, expr));
105 return datum.store_to_dest(bcx, dest, expr.id)
108 debug!("trans_into() expr={}", expr.repr(bcx.tcx()));
// Open a per-expression cleanup scope so temporaries created while
// translating this expression are dropped when the scope is popped below.
110 let cleanup_debug_loc = debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(),
114 bcx.fcx.push_ast_cleanup_scope(cleanup_debug_loc);
116 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
// Dispatch on the expression's value category (lvalue / datum / DPS / stmt).
117 let kind = ty::expr_kind(bcx.tcx(), expr);
119 ty::LvalueExpr | ty::RvalueDatumExpr => {
120 trans_unadjusted(bcx, expr).store_to_dest(dest, expr.id)
122 ty::RvalueDpsExpr => {
123 trans_rvalue_dps_unadjusted(bcx, expr, dest)
125 ty::RvalueStmtExpr => {
126 trans_rvalue_stmt_unadjusted(bcx, expr)
// Pop the cleanup scope opened above, emitting any pending cleanups.
130 bcx.fcx.pop_and_trans_ast_cleanup_scope(bcx, expr.id)
133 /// Translates an expression, returning a datum (and new block) encapsulating the result. When
134 /// possible, it is preferred to use `trans_into`, as that may avoid creating a temporary on the
136 pub fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
138 -> DatumBlock<'blk, 'tcx, Expr> {
139 debug!("trans(expr={})", bcx.expr_to_string(expr));
// Translate inside a fresh cleanup scope, then apply any recorded
// adjustments (autoderef/autoref/unsize) to the unadjusted datum.
144 let cleanup_debug_loc = debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(),
148 fcx.push_ast_cleanup_scope(cleanup_debug_loc);
149 let datum = unpack_datum!(bcx, trans_unadjusted(bcx, expr));
150 let datum = unpack_datum!(bcx, apply_adjustments(bcx, expr, datum));
151 bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, expr.id);
152 return DatumBlock::new(bcx, datum);
// Returns a pointer to the extra-word (length/vtable) field of a fat pointer.
155 pub fn get_len(bcx: Block, fat_ptr: ValueRef) -> ValueRef {
156 GEPi(bcx, fat_ptr, &[0u, abi::FAT_PTR_EXTRA])
// Returns a pointer to the data-address field of a fat pointer.
159 pub fn get_dataptr(bcx: Block, fat_ptr: ValueRef) -> ValueRef {
160 GEPi(bcx, fat_ptr, &[0u, abi::FAT_PTR_ADDR])
163 /// Helper for trans that apply adjustments from `expr` to `datum`, which should be the unadjusted
164 /// translation of `expr`.
165 fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
167 datum: Datum<'tcx, Expr>)
168 -> DatumBlock<'blk, 'tcx, Expr> {
170 let mut datum = datum;
// No adjustment recorded for this expression: return the datum unchanged.
171 let adjustment = match bcx.tcx().adjustments.borrow().get(&expr.id).cloned() {
173 return DatumBlock::new(bcx, datum);
177 debug!("unadjusted datum for expr {}: {}, adjustment={}",
178 expr.repr(bcx.tcx()),
179 datum.to_string(bcx.ccx()),
180 adjustment.repr(bcx.tcx()));
182 AdjustReifyFnPointer(_def_id) => {
183 // FIXME(#19925) once fn item types are
184 // zero-sized, we'll need to do something here
186 AdjustDerefRef(ref adj) => {
// Decide how many derefs to actually perform and whether the autoref
// (if any) still needs to be applied after them.
187 let (autoderefs, use_autoref) = match adj.autoref {
188 // Extracting a value from a box counts as a deref, but if we are
189 // just converting Box<[T, ..n]> to Box<[T]> we aren't really doing
190 // a deref (and wouldn't if we could treat Box like a normal struct).
191 Some(ty::AutoUnsizeUniq(..)) => (adj.autoderefs - 1, true),
192 // We are a bit paranoid about adjustments and thus might have a re-
193 // borrow here which merely derefs and then refs again (it might have
194 // a different region or mutability, but we don't care here. It might
195 // also be just in case we need to unsize. But if there are no nested
196 // adjustments then it should be a no-op).
197 Some(ty::AutoPtr(_, _, None)) if adj.autoderefs == 1 => {
199 // Don't skip a conversion from Box<T> to &T, etc.
201 let method_call = MethodCall::autoderef(expr.id, adj.autoderefs-1);
202 let method = bcx.tcx().method_map.borrow().get(&method_call).is_some();
204 // Don't skip an overloaded deref.
205 (adj.autoderefs, true)
207 (adj.autoderefs - 1, false)
210 _ => (adj.autoderefs, true),
213 _ => (adj.autoderefs, true)
// Perform the computed number of dereferences on an lvalue form of the datum.
218 let lval = unpack_datum!(bcx, datum.to_lvalue_datum(bcx, "auto_deref", expr.id));
219 datum = unpack_datum!(
220 bcx, deref_multiple(bcx, expr, lval.to_expr_datum(), autoderefs));
223 // (You might think there is a more elegant way to do this than a
224 // use_autoref bool, but then you remember that the borrow checker exists).
225 if let (true, &Some(ref a)) = (use_autoref, &adj.autoref) {
226 datum = unpack_datum!(bcx, apply_autoref(a,
233 debug!("after adjustments, datum={}", datum.to_string(bcx.ccx()));
234 return DatumBlock::new(bcx, datum);
// Applies a single `ty::AutoRef` adjustment to `datum`, recursing first into
// any nested autoref (the boxed inner `a`) before applying the outer one.
236 fn apply_autoref<'blk, 'tcx>(autoref: &ty::AutoRef<'tcx>,
237 bcx: Block<'blk, 'tcx>,
239 datum: Datum<'tcx, Expr>)
240 -> DatumBlock<'blk, 'tcx, Expr> {
242 let mut datum = datum;
244 let datum = match autoref {
245 &AutoPtr(_, _, ref a) | &AutoUnsafe(_, ref a) => {
// Recurse into the nested adjustment, then take a reference.
248 &Some(box ref a) => {
249 datum = unpack_datum!(bcx, apply_autoref(a, bcx, expr, datum));
253 unpack_datum!(bcx, ref_ptr(bcx, expr, datum))
255 &ty::AutoUnsize(ref k) => {
256 debug!(" AutoUnsize");
257 unpack_datum!(bcx, unsize_expr(bcx, expr, datum, k))
// Box<[T, ..n]> -> Box<[T]> has a dedicated path keyed off the length.
260 &ty::AutoUnsizeUniq(ty::UnsizeLength(len)) => {
261 debug!(" AutoUnsizeUniq(UnsizeLength)");
262 unpack_datum!(bcx, unsize_unique_vec(bcx, expr, datum, len))
264 &ty::AutoUnsizeUniq(ref k) => {
265 debug!(" AutoUnsizeUniq");
266 unpack_datum!(bcx, unsize_unique_expr(bcx, expr, datum, k))
270 DatumBlock::new(bcx, datum)
// Takes the address of `datum`, producing either a fat pointer (for unsized
// types) or a thin reference (for sized types).
273 fn ref_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
275 datum: Datum<'tcx, Expr>)
276 -> DatumBlock<'blk, 'tcx, Expr> {
277 debug!("ref_ptr(expr={}, datum={})",
278 expr.repr(bcx.tcx()),
279 datum.to_string(bcx.ccx()));
281 if !type_is_sized(bcx.tcx(), datum.ty) {
282 debug!("Taking address of unsized type {}",
283 bcx.ty_to_string(datum.ty));
284 ref_fat_ptr(bcx, expr, datum)
286 debug!("Taking address of sized type {}",
287 bcx.ty_to_string(datum.ty));
288 auto_ref(bcx, datum, expr)
292 // Retrieve the information we are losing (making dynamic) in an unsizing
294 // When making a dtor, we need to do different things depending on the
295 // ownership of the object. mk_ty is a function for turning `unadjusted_ty`
296 // into a type to be destructed. If we want to end up with a Box pointer,
297 // then mk_ty should make a Box pointer (T -> Box<T>), if we want a
298 // borrowed reference then it should be T -> &T.
299 fn unsized_info<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
300 kind: &ty::UnsizeKind<'tcx>,
302 unadjusted_ty: Ty<'tcx>,
303 mk_ty: F) -> ValueRef where
304 F: FnOnce(Ty<'tcx>) -> Ty<'tcx>,
306 // FIXME(#19596) workaround: `|t| t` causes monomorphization recursion
307 fn identity<T>(t: T) -> T { t }
309 debug!("unsized_info(kind={}, id={}, unadjusted_ty={})",
310 kind, id, unadjusted_ty.repr(bcx.tcx()));
// For `[T, ..n] -> [T]` the extra word is simply the constant length.
312 &ty::UnsizeLength(len) => C_uint(bcx.ccx(), len),
// For a struct whose last field is unsized, recurse into the field's
// type-parameter substitution to compute the extra word.
313 &ty::UnsizeStruct(box ref k, tp_index) => match unadjusted_ty.sty {
314 ty::ty_struct(_, ref substs) => {
315 let ty_substs = substs.types.get_slice(subst::TypeSpace);
316 // The dtor for a field treats it like a value, so mk_ty
317 // should just be the identity function.
318 unsized_info(bcx, k, id, ty_substs[tp_index], identity)
320 _ => bcx.sess().bug(format!("UnsizeStruct with bad sty: {}",
321 bcx.ty_to_string(unadjusted_ty))[])
// For a trait-object coercion the extra word is a pointer to the vtable.
323 &ty::UnsizeVtable(ty::TyTrait { ref principal, .. }, _) => {
324 // Note that we preserve binding levels here:
325 let substs = principal.0.substs.with_self_ty(unadjusted_ty).erase_regions();
326 let substs = bcx.tcx().mk_substs(substs);
328 ty::Binder(Rc::new(ty::TraitRef { def_id: principal.def_id(),
330 let trait_ref = bcx.monomorphize(&trait_ref);
331 let box_ty = mk_ty(unadjusted_ty);
333 meth::get_vtable(bcx, box_ty, trait_ref),
334 Type::vtable_ptr(bcx.ccx()))
// Coerces `datum` to its unsized counterpart (per `k`), producing a fat-ptr
// datum of the "open" form of the unsized type.
339 fn unsize_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
341 datum: Datum<'tcx, Expr>,
342 k: &ty::UnsizeKind<'tcx>)
343 -> DatumBlock<'blk, 'tcx, Expr> {
345 let datum_ty = datum.ty;
346 let unsized_ty = ty::unsize_ty(tcx, datum_ty, k, expr.span);
347 debug!("unsized_ty={}", unsized_ty.repr(bcx.tcx()));
348 let dest_ty = ty::mk_open(tcx, unsized_ty);
// NOTE(review): this debug line is labelled `dest_ty` but prints
// `unsized_ty`; probably a copy-paste slip (harmless, debug-only).
349 debug!("dest_ty={}", unsized_ty.repr(bcx.tcx()));
350 // Closures for extracting and manipulating the data and payload parts of
// `info` computes the fat pointer's extra word via `unsized_info`.
352 let info = |: bcx, _val| unsized_info(bcx,
357 tcx.mk_region(ty::ReStatic),
360 mutbl: ast::MutImmutable
// The data-pointer transformation depends on the kind of unsizing:
363 ty::UnsizeStruct(..) =>
364 into_fat_ptr(bcx, expr, datum, dest_ty, |bcx, val| {
365 PointerCast(bcx, val, type_of::type_of(bcx.ccx(), unsized_ty).ptr_to())
// [T, ..n] -> [T]: point at the first element.
367 ty::UnsizeLength(..) =>
368 into_fat_ptr(bcx, expr, datum, dest_ty, |bcx, val| {
369 GEPi(bcx, val, &[0u, 0u])
// Trait object: the data pointer is an opaque i8*.
371 ty::UnsizeVtable(..) =>
372 into_fat_ptr(bcx, expr, datum, dest_ty, |_bcx, val| {
373 PointerCast(bcx, val, Type::i8p(bcx.ccx()))
// Re-packages an existing fat pointer: loads its data pointer and extra word
// and stores them into a fresh fat-ptr scratch slot of the "closed" type.
378 fn ref_fat_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
380 datum: Datum<'tcx, Expr>)
381 -> DatumBlock<'blk, 'tcx, Expr> {
383 let dest_ty = ty::close_type(tcx, datum.ty);
384 let base = |: bcx, val| Load(bcx, get_dataptr(bcx, val));
385 let len = |: bcx, val| Load(bcx, get_len(bcx, val));
386 into_fat_ptr(bcx, expr, datum, dest_ty, base, len)
// Builds a fat pointer of type `dest_ty` in a scratch slot: `base` computes
// the data pointer and `info` the extra word, both from the datum's lvalue.
389 fn into_fat_ptr<'blk, 'tcx, F, G>(bcx: Block<'blk, 'tcx>,
391 datum: Datum<'tcx, Expr>,
395 -> DatumBlock<'blk, 'tcx, Expr> where
396 F: FnOnce(Block<'blk, 'tcx>, ValueRef) -> ValueRef,
397 G: FnOnce(Block<'blk, 'tcx>, ValueRef) -> ValueRef,
// Force the datum into an lvalue so we have a stable address to point at.
402 let lval = unpack_datum!(bcx,
403 datum.to_lvalue_datum(bcx, "into_fat_ptr", expr.id));
404 let base = base(bcx, lval.val);
405 let info = info(bcx, lval.val);
407 let scratch = rvalue_scratch_datum(bcx, dest_ty, "__fat_ptr");
408 Store(bcx, base, get_dataptr(bcx, scratch.val));
409 Store(bcx, info, get_len(bcx, scratch.val));
411 DatumBlock::new(bcx, scratch.to_expr_datum())
// Converts Box<[T, ..len]> into Box<[T]>: moves the box into a fat-ptr
// scratch slot and stores the constant length as the extra word.
414 fn unsize_unique_vec<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
416 datum: Datum<'tcx, Expr>,
418 -> DatumBlock<'blk, 'tcx, Expr> {
422 let datum_ty = datum.ty;
424 let lval = unpack_datum!(bcx,
425 datum.to_lvalue_datum(bcx, "unsize_unique_vec", expr.id));
427 let ll_len = C_uint(bcx.ccx(), len);
428 let unit_ty = ty::sequence_element_type(tcx, ty::type_content(datum_ty));
429 let vec_ty = ty::mk_uniq(tcx, ty::mk_vec(tcx, unit_ty, None));
430 let scratch = rvalue_scratch_datum(bcx, vec_ty, "__unsize_unique");
// Store the box (data pointer) into the fat pointer's address slot, cast
// to the original box type so `store_to` sees matching types.
432 let base = get_dataptr(bcx, scratch.val);
433 let base = PointerCast(bcx,
435 type_of::type_of(bcx.ccx(), datum_ty).ptr_to());
436 bcx = lval.store_to(bcx, base);
438 Store(bcx, ll_len, get_len(bcx, scratch.val));
439 DatumBlock::new(bcx, scratch.to_expr_datum())
// Converts Box<T> into Box<Unsized> (e.g. Box<Trait>): moves the box into a
// fat-ptr scratch slot and computes the extra word via `unsized_info`.
442 fn unsize_unique_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
444 datum: Datum<'tcx, Expr>,
445 k: &ty::UnsizeKind<'tcx>)
446 -> DatumBlock<'blk, 'tcx, Expr> {
450 let datum_ty = datum.ty;
// The input must be a unique pointer; peel it to get the pointee type.
451 let unboxed_ty = match datum_ty.sty {
453 _ => bcx.sess().bug(format!("Expected ty_uniq, found {}",
454 bcx.ty_to_string(datum_ty))[])
456 let result_ty = ty::mk_uniq(tcx, ty::unsize_ty(tcx, unboxed_ty, k, expr.span));
458 let lval = unpack_datum!(bcx,
459 datum.to_lvalue_datum(bcx, "unsize_unique_expr", expr.id));
461 let scratch = rvalue_scratch_datum(bcx, result_ty, "__uniq_fat_ptr");
462 let llbox_ty = type_of::type_of(bcx.ccx(), datum_ty);
463 let base = PointerCast(bcx, get_dataptr(bcx, scratch.val), llbox_ty.ptr_to());
464 bcx = lval.store_to(bcx, base);
// mk_ty wraps in Box so the vtable path sees the owning-pointer type.
466 let info = unsized_info(bcx, k, expr.id, unboxed_ty, |t| ty::mk_uniq(tcx, t));
467 Store(bcx, info, get_len(bcx, scratch.val));
// Re-register the scratch slot as an lvalue so the boxed payload gets a
// cleanup under its new (fat) type.
469 let scratch = unpack_datum!(bcx,
470 scratch.to_expr_datum().to_lvalue_datum(bcx,
471 "fresh_uniq_fat_ptr",
474 DatumBlock::new(bcx, scratch.to_expr_datum())
478 /// Translates an expression in "lvalue" mode -- meaning that it returns a reference to the memory
479 /// that the expr represents.
481 /// If this expression is an rvalue, this implies introducing a temporary. In other words,
482 /// something like `x().f` is translated into roughly the equivalent of
484 /// { tmp = x(); tmp.f }
485 pub fn trans_to_lvalue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
488 -> DatumBlock<'blk, 'tcx, Lvalue> {
490 let datum = unpack_datum!(bcx, trans(bcx, expr));
// `name` labels any temporary stack slot created for an rvalue result.
491 return datum.to_lvalue_datum(bcx, name, expr.id);
494 /// A version of `trans` that ignores adjustments. You almost certainly do not want to call this
496 fn trans_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
498 -> DatumBlock<'blk, 'tcx, Expr> {
501 debug!("trans_unadjusted(expr={})", bcx.expr_to_string(expr));
502 let _indenter = indenter();
504 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
// Dispatch on value category; each arm ultimately yields a DatumBlock.
506 return match ty::expr_kind(bcx.tcx(), expr) {
507 ty::LvalueExpr | ty::RvalueDatumExpr => {
508 let datum = unpack_datum!(bcx, {
509 trans_datum_unadjusted(bcx, expr)
512 DatumBlock {bcx: bcx, datum: datum}
// Statement expressions have no value; synthesize an undef "nil" datum.
515 ty::RvalueStmtExpr => {
516 bcx = trans_rvalue_stmt_unadjusted(bcx, expr);
517 nil(bcx, expr_ty(bcx, expr))
520 ty::RvalueDpsExpr => {
521 let ty = expr_ty(bcx, expr);
// Zero-sized results need no destination; translate for effect only.
522 if type_is_zero_size(bcx.ccx(), ty) {
523 bcx = trans_rvalue_dps_unadjusted(bcx, expr, Ignore);
// Otherwise allocate a scratch slot and translate into it.
526 let scratch = rvalue_scratch_datum(bcx, ty, "");
527 bcx = trans_rvalue_dps_unadjusted(
528 bcx, expr, SaveIn(scratch.val));
530 // Note: this is not obviously a good idea. It causes
531 // immediate values to be loaded immediately after a
532 // return from a call or other similar expression,
533 // which in turn leads to alloca's having shorter
534 // lifetimes and hence larger stack frames. However,
535 // in turn it can lead to more register pressure.
536 // Still, in practice it seems to increase
537 // performance, since we have fewer problems with
539 let scratch = unpack_datum!(
540 bcx, scratch.to_appropriate_datum(bcx));
542 DatumBlock::new(bcx, scratch.to_expr_datum())
// Produces a placeholder datum (LLVM undef of `ty`) for expressions that
// yield no meaningful value, e.g. statement expressions.
547 fn nil<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ty: Ty<'tcx>)
548 -> DatumBlock<'blk, 'tcx, Expr> {
549 let llval = C_undef(type_of::type_of(bcx.ccx(), ty));
550 let datum = immediate_rvalue(llval, ty);
551 DatumBlock::new(bcx, datum.to_expr_datum())
// Translates lvalue / rvalue-datum expressions, dispatching on AST node kind.
// Each arm delegates to a specialized helper.
555 fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
557 -> DatumBlock<'blk, 'tcx, Expr> {
560 let _icx = push_ctxt("trans_datum_unadjusted");
563 ast::ExprParen(ref e) => {
566 ast::ExprPath(_) => {
567 trans_def(bcx, expr, bcx.def(expr.id))
569 ast::ExprField(ref base, ident) => {
570 trans_rec_field(bcx, &**base, ident.node)
572 ast::ExprTupField(ref base, idx) => {
573 trans_rec_tup_field(bcx, &**base, idx.node)
575 ast::ExprIndex(ref base, ref idx) => {
// Slicing via range syntax is handled here by calling the overloaded
// slicing method directly rather than going through trans_index.
577 ast::ExprRange(ref start, ref end) => {
578 // Special case for slicing syntax (KILLME).
579 let _icx = push_ctxt("trans_slice");
582 let method_call = MethodCall::expr(expr.id);
583 let method_ty = ccx.tcx()
587 .map(|method| method.ty);
588 let base_datum = unpack_datum!(bcx, trans(bcx, &**base));
// Collect whichever of start/end are present as method arguments.
590 let mut args = vec![];
591 start.as_ref().map(|e| args.push((unpack_datum!(bcx, trans(bcx, &**e)), e.id)));
592 end.as_ref().map(|e| args.push((unpack_datum!(bcx, trans(bcx, &**e)), e.id)));
594 let result_ty = ty::ty_fn_ret(monomorphize_type(bcx,
595 method_ty.unwrap())).unwrap();
596 let scratch = rvalue_scratch_datum(bcx, result_ty, "trans_slice");
599 trans_overloaded_op(bcx,
604 Some(SaveIn(scratch.val)),
606 DatumBlock::new(bcx, scratch.to_expr_datum())
608 _ => trans_index(bcx, expr, &**base, &**idx, MethodCall::expr(expr.id))
611 ast::ExprBox(_, ref contents) => {
612 // Special case for `Box<T>`
613 let box_ty = expr_ty(bcx, expr);
614 let contents_ty = expr_ty(bcx, &**contents);
617 trans_uniq_expr(bcx, box_ty, &**contents, contents_ty)
619 _ => bcx.sess().span_bug(expr.span,
620 "expected unique box")
624 ast::ExprLit(ref lit) => trans_immediate_lit(bcx, expr, &**lit),
625 ast::ExprBinary(op, ref lhs, ref rhs) => {
626 trans_binary(bcx, expr, op, &**lhs, &**rhs)
628 ast::ExprUnary(op, ref x) => {
629 trans_unary(bcx, expr, op, &**x)
631 ast::ExprAddrOf(_, ref x) => {
// &[...] / &[x, ..n]: translate as a slice inside its own cleanup scope.
633 ast::ExprRepeat(..) | ast::ExprVec(..) => {
634 // Special case for slices.
635 let cleanup_debug_loc =
636 debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(),
640 fcx.push_ast_cleanup_scope(cleanup_debug_loc);
641 let datum = unpack_datum!(
642 bcx, tvec::trans_slice_vec(bcx, expr, &**x));
643 bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, x.id);
644 DatumBlock::new(bcx, datum)
647 trans_addr_of(bcx, expr, &**x)
651 ast::ExprCast(ref val, _) => {
652 // Datum output mode means this is a scalar cast:
653 trans_imm_cast(bcx, &**val, expr.id)
// Any other node reaching here is a categorization bug.
656 bcx.tcx().sess.span_bug(
658 format!("trans_rvalue_datum_unadjusted reached \
659 fall-through case: {}",
// Shared implementation for named-field and tuple-field access; `get_idx`
// maps the field list to the index of the desired field.
665 fn trans_field<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
668 -> DatumBlock<'blk, 'tcx, Expr> where
669 F: FnOnce(&'blk ty::ctxt<'tcx>, &[ty::field<'tcx>]) -> uint,
672 let _icx = push_ctxt("trans_rec_field");
674 let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, base, "field"));
675 let bare_ty = ty::unopen_type(base_datum.ty);
676 let repr = adt::represent_type(bcx.ccx(), bare_ty);
677 with_field_tys(bcx.tcx(), bare_ty, None, move |discr, field_tys| {
678 let ix = get_idx(bcx.tcx(), field_tys);
679 let d = base_datum.get_element(
682 |srcval| adt::trans_field_ptr(bcx, &*repr, srcval, discr, ix));
// Sized fields are plain lvalues; unsized fields become a fat pointer
// whose extra word is copied from the base datum's fat pointer.
684 if type_is_sized(bcx.tcx(), d.ty) {
685 DatumBlock { datum: d.to_expr_datum(), bcx: bcx }
687 let scratch = rvalue_scratch_datum(bcx, ty::mk_open(bcx.tcx(), d.ty), "");
688 Store(bcx, d.val, get_dataptr(bcx, scratch.val));
689 let info = Load(bcx, get_len(bcx, base_datum.val));
690 Store(bcx, info, get_len(bcx, scratch.val));
692 DatumBlock::new(bcx, scratch.to_expr_datum())
699 /// Translates `base.field`.
700 fn trans_rec_field<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
703 -> DatumBlock<'blk, 'tcx, Expr> {
// Resolve the named field to its index, then defer to trans_field.
704 trans_field(bcx, base, |tcx, field_tys| ty::field_idx_strict(tcx, field.name, field_tys))
707 /// Translates `base.<idx>`.
708 fn trans_rec_tup_field<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
711 -> DatumBlock<'blk, 'tcx, Expr> {
// A tuple index is already the field position, so pass it through.
712 trans_field(bcx, base, |_, _| idx)
715 fn trans_index<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
716 index_expr: &ast::Expr,
719 method_call: MethodCall)
720 -> DatumBlock<'blk, 'tcx, Expr> {
721 //! Translates `base[idx]`.
723 let _icx = push_ctxt("trans_index");
727 // Check for overloaded index.
728 let method_ty = ccx.tcx()
732 .map(|method| method.ty);
733 let elt_datum = match method_ty {
735 let base_datum = unpack_datum!(bcx, trans(bcx, base));
737 // Translate index expression.
738 let ix_datum = unpack_datum!(bcx, trans(bcx, idx));
// The overloaded method returns a reference; derive the element type
// by dereferencing the method's return type.
740 let ref_ty = ty::ty_fn_ret(monomorphize_type(bcx, method_ty)).unwrap();
741 let elt_ty = match ty::deref(ref_ty, true) {
743 bcx.tcx().sess.span_bug(index_expr.span,
744 "index method didn't return a \
745 dereferenceable type?!")
747 Some(elt_tm) => elt_tm.ty,
750 // Overloaded. Evaluate `trans_overloaded_op`, which will
751 // invoke the user's index() method, which basically yields
752 // a `&T` pointer. We can then proceed down the normal
753 // path (below) to dereference that `&T`.
754 let scratch = rvalue_scratch_datum(bcx, ref_ty, "overloaded_index_elt");
756 trans_overloaded_op(bcx,
760 vec![(ix_datum, idx.id)],
761 Some(SaveIn(scratch.val)),
763 let datum = scratch.to_expr_datum();
764 if type_is_sized(bcx.tcx(), elt_ty) {
765 Datum::new(datum.to_llscalarish(bcx), elt_ty, LvalueExpr)
767 Datum::new(datum.val, ty::mk_open(bcx.tcx(), elt_ty), LvalueExpr)
// Built-in indexing path: compute an element pointer with a bounds check.
771 let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx,
775 // Translate index expression and cast to a suitable LLVM integer.
776 // Rust is less strict than LLVM in this regard.
777 let ix_datum = unpack_datum!(bcx, trans(bcx, idx));
778 let ix_val = ix_datum.to_llscalarish(bcx);
779 let ix_size = machine::llbitsize_of_real(bcx.ccx(),
781 let int_size = machine::llbitsize_of_real(bcx.ccx(),
// Widen (sign- or zero-extend per the index's signedness) or truncate
// the index to the target's int width.
784 if ix_size < int_size {
785 if ty::type_is_signed(expr_ty(bcx, idx)) {
786 SExt(bcx, ix_val, ccx.int_type())
787 } else { ZExt(bcx, ix_val, ccx.int_type()) }
788 } else if ix_size > int_size {
789 Trunc(bcx, ix_val, ccx.int_type())
797 ty::sequence_element_type(bcx.tcx(),
799 base::maybe_name_value(bcx.ccx(), vt.llunit_size, "unit_sz");
801 let (base, len) = base_datum.get_vec_base_and_len(bcx);
803 debug!("trans_index: base {}", bcx.val_to_string(base));
804 debug!("trans_index: len {}", bcx.val_to_string(len));
// Emit `ix >= len` check, hinted unlikely via llvm.expect, branching to
// the bounds-check failure path when it trips.
806 let bounds_check = ICmp(bcx, llvm::IntUGE, ix_val, len);
807 let expect = ccx.get_intrinsic(&("llvm.expect.i1"));
808 let expected = Call(bcx,
810 &[bounds_check, C_bool(ccx, false)],
812 bcx = with_cond(bcx, expected, |bcx| {
813 controlflow::trans_fail_bounds_check(bcx,
818 let elt = InBoundsGEP(bcx, base, &[ix_val]);
819 let elt = PointerCast(bcx, elt, vt.llunit_ty.ptr_to());
820 Datum::new(elt, vt.unit_ty, LvalueExpr)
824 DatumBlock::new(bcx, elt_datum)
827 fn trans_def<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
828 ref_expr: &ast::Expr,
830 -> DatumBlock<'blk, 'tcx, Expr> {
831 //! Translates a reference to a path.
833 let _icx = push_ctxt("trans_def_lvalue")
835 def::DefFn(..) | def::DefStaticMethod(..) | def::DefMethod(..) |
836 def::DefStruct(_) | def::DefVariant(..) => {
// Function-like items become immediate fn-pointer datums.
837 let datum = trans_def_fn_unadjusted(bcx.ccx(), ref_expr, def,
838 bcx.fcx.param_substs);
839 DatumBlock::new(bcx, datum.to_expr_datum())
841 def::DefStatic(did, _) => {
842 // There are two things that may happen here:
843 // 1) If the static item is defined in this crate, it will be
844 // translated using `get_item_val`, and we return a pointer to
846 // 2) If the static item is defined in another crate then we add
847 // (or reuse) a declaration of an external global, and return a
849 let const_ty = expr_ty(bcx, ref_expr);
// Local helper: resolve the static's LLVM global, casting local
// definitions to the expected pointer type.
851 fn get_val<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, did: ast::DefId,
852 const_ty: Ty<'tcx>) -> ValueRef {
853 // For external constants, we don't inline.
854 if did.krate == ast::LOCAL_CRATE {
857 // The LLVM global has the type of its initializer,
858 // which may not be equal to the enum's type for
860 let val = base::get_item_val(bcx.ccx(), did.node);
861 let pty = type_of::type_of(bcx.ccx(), const_ty).ptr_to();
862 PointerCast(bcx, val, pty)
865 base::get_extern_const(bcx.ccx(), did, const_ty)
868 let val = get_val(bcx, did, const_ty);
869 DatumBlock::new(bcx, Datum::new(val, const_ty, LvalueExpr))
871 def::DefConst(did) => {
872 // First, inline any external constants into the local crate so we
873 // can be sure to get the LLVM value corresponding to it.
874 let did = inline::maybe_instantiate_inline(bcx.ccx(), did);
875 if did.krate != ast::LOCAL_CRATE {
876 bcx.tcx().sess.span_bug(ref_expr.span,
877 "cross crate constant could not \
880 let val = base::get_item_val(bcx.ccx(), did.node);
882 // Next, we need to create a ByRef rvalue datum to return. We can't
883 // use the normal .to_ref_datum() function because the type of
884 // `val` is not actually the same as `const_ty`.
886 // To get around this, we make a custom alloca slot with the
887 // appropriate type (const_ty), and then we cast it to a pointer of
888 // typeof(val), store the value, and then hand this slot over to
889 // the datum infrastructure.
890 let const_ty = expr_ty(bcx, ref_expr);
891 let llty = type_of::type_of(bcx.ccx(), const_ty);
892 let slot = alloca(bcx, llty, "const");
893 let pty = Type::from_ref(unsafe { llvm::LLVMTypeOf(val) }).ptr_to();
894 Store(bcx, val, PointerCast(bcx, slot, pty));
896 let datum = Datum::new(slot, const_ty, Rvalue::new(ByRef));
897 DatumBlock::new(bcx, datum.to_expr_datum())
// Everything else is a local variable or upvar.
900 DatumBlock::new(bcx, trans_local_var(bcx, def).to_expr_datum())
// Translates statement-like expressions (no value produced), dispatching on
// AST node kind; returns the (possibly new) block.
905 fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
907 -> Block<'blk, 'tcx> {
909 let _icx = push_ctxt("trans_rvalue_stmt");
// Nothing to do once the current block is already unreachable.
911 if bcx.unreachable.get() {
915 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
918 ast::ExprParen(ref e) => {
919 trans_into(bcx, &**e, Ignore)
921 ast::ExprBreak(label_opt) => {
922 controlflow::trans_break(bcx, expr.id, label_opt)
924 ast::ExprAgain(label_opt) => {
925 controlflow::trans_cont(bcx, expr.id, label_opt)
927 ast::ExprRet(ref ex) => {
928 // Check to see if the return expression itself is reachable.
929 // This can occur when the inner expression contains a return
930 let reachable = if let Some(ref cfg) = bcx.fcx.cfg {
931 cfg.node_is_reachable(expr.id)
937 controlflow::trans_ret(bcx, ex.as_ref().map(|e| &**e))
939 // If it's not reachable, just translate the inner expression
940 // directly. This avoids having to manage a return slot when
941 // it won't actually be used anyway.
942 if let &Some(ref x) = ex {
943 bcx = trans_into(bcx, &**x, Ignore);
945 // Mark the end of the block as unreachable. Once we get to
946 // a return expression, there's no more we should be doing
952 ast::ExprWhile(ref cond, ref body, _) => {
953 controlflow::trans_while(bcx, expr.id, &**cond, &**body)
955 ast::ExprForLoop(ref pat, ref head, ref body, _) => {
956 controlflow::trans_for(bcx,
962 ast::ExprLoop(ref body, _) => {
963 controlflow::trans_loop(bcx, expr.id, &**body)
965 ast::ExprAssign(ref dst, ref src) => {
966 let src_datum = unpack_datum!(bcx, trans(bcx, &**src));
967 let dst_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, &**dst, "assign"));
969 if type_needs_drop(bcx.tcx(), dst_datum.ty) {
970 // If there are destructors involved, make sure we
971 // are copying from an rvalue, since that cannot possibly
972 // alias an lvalue. We are concerned about code like:
980 // where e.g. a : Option<Foo> and a.b :
981 // Option<Foo>. In that case, freeing `a` before the
982 // assignment may also free `a.b`!
984 // We could avoid this intermediary with some analysis
985 // to determine whether `dst` may possibly own `src`.
986 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
987 let src_datum = unpack_datum!(
988 bcx, src_datum.to_rvalue_datum(bcx, "ExprAssign"));
// Drop the old destination value before overwriting it.
989 bcx = glue::drop_ty(bcx,
992 Some(NodeInfo { id: expr.id, span: expr.span }));
993 src_datum.store_to(bcx, dst_datum.val)
995 src_datum.store_to(bcx, dst_datum.val)
998 ast::ExprAssignOp(op, ref dst, ref src) => {
999 trans_assign_op(bcx, expr, op, &**dst, &**src)
1001 ast::ExprInlineAsm(ref a) => {
1002 asm::trans_inline_asm(bcx, a)
// Any other node reaching here is a categorization bug.
1005 bcx.tcx().sess.span_bug(
1007 format!("trans_rvalue_stmt_unadjusted reached \
1008 fall-through case: {}",
// Translates DPS (destination-passing-style) expressions: the result is
// written directly into `dest` rather than returned as a datum.
1014 fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1017 -> Block<'blk, 'tcx> {
1018 let _icx = push_ctxt("trans_rvalue_dps_unadjusted");
1020 let tcx = bcx.tcx();
1022 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
1025 ast::ExprParen(ref e) => {
1026 trans_into(bcx, &**e, dest)
1028 ast::ExprPath(_) => {
1029 trans_def_dps_unadjusted(bcx, expr, bcx.def(expr.id), dest)
1031 ast::ExprIf(ref cond, ref thn, ref els) => {
1032 controlflow::trans_if(bcx, expr.id, &**cond, &**thn, els.as_ref().map(|e| &**e), dest)
1034 ast::ExprMatch(ref discr, ref arms, _) => {
1035 _match::trans_match(bcx, expr, &**discr, arms[], dest)
1037 ast::ExprBlock(ref blk) => {
1038 controlflow::trans_block(bcx, &**blk, dest)
1040 ast::ExprStruct(_, ref fields, ref base) => {
1043 base.as_ref().map(|e| &**e),
1046 node_id_type(bcx, expr.id),
// Range literals desugar into the appropriate lang-item struct
// (Range / RangeFrom / RangeTo / FullRange) built via synthesized fields.
1049 ast::ExprRange(ref start, ref end) => {
1050 // FIXME it is just not right that we are synthesising ast nodes in
// Build a synthetic struct-literal field for the desugared range.
1052 fn make_field(field_name: &str, expr: P<ast::Expr>) -> ast::Field {
1054 ident: codemap::dummy_spanned(token::str_to_ident(field_name)),
1056 span: codemap::DUMMY_SP,
1060 // A range just desugars into a struct.
1061 // Note that the type of the start and end may not be the same, but
1062 // they should only differ in their lifetime, which should not matter
1064 let (did, fields, ty_params) = match (start, end) {
1065 (&Some(ref start), &Some(ref end)) => {
1067 let fields = vec![make_field("start", start.clone()),
1068 make_field("end", end.clone())];
1069 (tcx.lang_items.range_struct(), fields, vec![node_id_type(bcx, start.id)])
1071 (&Some(ref start), &None) => {
1072 // Desugar to RangeFrom
1073 let fields = vec![make_field("start", start.clone())];
1074 (tcx.lang_items.range_from_struct(), fields, vec![node_id_type(bcx, start.id)])
1076 (&None, &Some(ref end)) => {
1077 // Desugar to RangeTo
1078 let fields = vec![make_field("end", end.clone())];
1079 (tcx.lang_items.range_to_struct(), fields, vec![node_id_type(bcx, end.id)])
1082 // Desugar to FullRange
1083 (tcx.lang_items.full_range_struct(), vec![], vec![])
1087 if let Some(did) = did {
1088 let substs = Substs::new_type(ty_params, vec![]);
1094 ty::mk_struct(tcx, did, tcx.mk_substs(substs)),
1097 tcx.sess.span_bug(expr.span,
1098 "No lang item for ranges (how did we get this far?)")
// Tuples are translated like structs with positionally-numbered fields.
1101 ast::ExprTup(ref args) => {
1102 let numbered_fields: Vec<(uint, &ast::Expr)> =
1103 args.iter().enumerate().map(|(i, arg)| (i, &**arg)).collect();
1110 Some(NodeInfo { id: expr.id, span: expr.span }))
1112 ast::ExprLit(ref lit) => {
1114 ast::LitStr(ref s, _) => {
1115 tvec::trans_lit_str(bcx, expr, (*s).clone(), dest)
1120 .span_bug(expr.span,
1121 "trans_rvalue_dps_unadjusted shouldn't be \
1122 translating this type of literal")
1126 ast::ExprVec(..) | ast::ExprRepeat(..) => {
1127 tvec::trans_fixed_vstore(bcx, expr, dest)
1129 ast::ExprClosure(_, _, ref decl, ref body) => {
1130 // Check the side-table to see whether this is an unboxed
1131 // closure or an older, legacy style closure. Store this
1132 // into a variable to ensure the RefCell-lock is
1133 // released before we recurse.
1134 let is_unboxed_closure =
1135 bcx.tcx().unboxed_closures.borrow().contains_key(&ast_util::local_def(expr.id));
1136 if is_unboxed_closure {
1137 closure::trans_unboxed_closure(bcx, &**decl, &**body, expr.id, dest)
1139 let expr_ty = expr_ty(bcx, expr);
1140 let store = ty::ty_closure_store(expr_ty);
1141 debug!("translating block function {} with type {}",
1142 expr_to_string(expr), expr_ty.repr(tcx));
1143 closure::trans_expr_fn(bcx, store, &**decl, &**body, expr.id, dest)
1146 ast::ExprCall(ref f, ref args) => {
// `f(..)` where `f` is an overloaded call (Fn* trait) vs. a plain call.
1147 if bcx.tcx().is_method_call(expr.id) {
1148 trans_overloaded_call(bcx,
1154 callee::trans_call(bcx,
1157 callee::ArgExprs(args[]),
1161 ast::ExprMethodCall(_, _, ref args) => {
1162 callee::trans_method_call(bcx,
1165 callee::ArgExprs(args[]),
1168 ast::ExprBinary(op, ref lhs, ref rhs) => {
1169 // if not overloaded, would be RvalueDatumExpr
1170 let lhs = unpack_datum!(bcx, trans(bcx, &**lhs));
1171 let rhs_datum = unpack_datum!(bcx, trans(bcx, &**rhs));
1172 trans_overloaded_op(bcx, expr, MethodCall::expr(expr.id), lhs,
1173 vec![(rhs_datum, rhs.id)], Some(dest),
1174 !ast_util::is_by_value_binop(op)).bcx
1176 ast::ExprUnary(op, ref subexpr) => {
1177 // if not overloaded, would be RvalueDatumExpr
1178 let arg = unpack_datum!(bcx, trans(bcx, &**subexpr));
1179 trans_overloaded_op(bcx, expr, MethodCall::expr(expr.id),
1180 arg, Vec::new(), Some(dest), !ast_util::is_by_value_unop(op)).bcx
1182 ast::ExprIndex(ref base, ref idx) => {
1183 // if not overloaded, would be RvalueDatumExpr
1184 let base = unpack_datum!(bcx, trans(bcx, &**base));
1185 let idx_datum = unpack_datum!(bcx, trans(bcx, &**idx));
1186 trans_overloaded_op(bcx, expr, MethodCall::expr(expr.id), base,
1187 vec![(idx_datum, idx.id)], Some(dest), true).bcx
1189 ast::ExprCast(ref val, _) => {
1190 // DPS output mode means this is a trait cast:
1191 if ty::type_is_trait(node_id_type(bcx, expr.id)) {
1193 bcx.tcx().object_cast_map.borrow()
1195 .map(|t| (*t).clone())
1197 let trait_ref = bcx.monomorphize(&trait_ref);
1198 let datum = unpack_datum!(bcx, trans(bcx, &**val));
1199 meth::trans_trait_cast(bcx, datum, expr.id,
1202 bcx.tcx().sess.span_bug(expr.span,
1203 "expr_cast of non-trait");
1206 ast::ExprAssignOp(op, ref dst, ref src) => {
1207 trans_assign_op(bcx, expr, op, &**dst, &**src)
// Any other node reaching here is a categorization bug.
1210 bcx.tcx().sess.span_bug(
1212 format!("trans_rvalue_dps_unadjusted reached fall-through \
// Translates a path expression referring to a definition (enum variant or
// struct constructor) in destination-passing style: the result is written
// into `dest` (or skipped entirely when `dest` is `Ignore`).
// - A variant WITH arguments stores the constructor fn pointer (a value).
// - A variant WITHOUT arguments just sets the discriminant in place.
// - A struct constructor sets discriminant 0 only when the struct has a dtor.
// Any other def reaching here is a compiler bug (`span_bug` below).
// NOTE(review): this listing has elided lines (original numbering jumps);
// code is kept byte-identical to the source as shown.
1219 fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1220 ref_expr: &ast::Expr,
1223 -> Block<'blk, 'tcx> {
1224 let _icx = push_ctxt("trans_def_dps_unadjusted");
// Nothing to materialize if the caller ignores the result.
1226 let lldest = match dest {
1227 SaveIn(lldest) => lldest,
1228 Ignore => { return bcx; }
1232 def::DefVariant(tid, vid, _) => {
1233 let variant_info = ty::enum_variant_with_id(bcx.tcx(), tid, vid);
1234 if variant_info.args.len() > 0u {
// N-ary variant: the value of the path is the constructor function.
1236 let llfn = callee::trans_fn_ref(bcx.ccx(), vid,
1237 ExprId(ref_expr.id),
1238 bcx.fcx.param_substs).val;
1239 Store(bcx, llfn, lldest);
// Nullary variant: just write the discriminant into the destination.
1243 let ty = expr_ty(bcx, ref_expr);
1244 let repr = adt::represent_type(bcx.ccx(), ty);
1245 adt::trans_set_discr(bcx, &*repr, lldest,
1246 variant_info.disr_val);
1250 def::DefStruct(_) => {
1251 let ty = expr_ty(bcx, ref_expr);
// Structs with destructors carry a drop flag; initialize it here.
1253 ty::ty_struct(did, _) if ty::has_dtor(bcx.tcx(), did) => {
1254 let repr = adt::represent_type(bcx.ccx(), ty);
1255 adt::trans_set_discr(bcx, &*repr, lldest, 0);
// Fall-through: any other def kind is not a DPS expression.
1262 bcx.tcx().sess.span_bug(ref_expr.span, format!(
1263 "Non-DPS def {} referened by {}",
1264 def, bcx.node_id_to_string(ref_expr.id))[]);
/// Translates a path referring to a callable definition (fn, struct/variant
/// constructor, or method) into an rvalue datum holding the fn pointer.
/// Trait-dispatched static methods go through `trans_static_method_callee`;
/// everything else goes through `callee::trans_fn_ref`.
// NOTE(review): lines elided in this listing; code kept byte-identical.
1269 pub fn trans_def_fn_unadjusted<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
1270 ref_expr: &ast::Expr,
1272 param_substs: &subst::Substs<'tcx>)
1273 -> Datum<'tcx, Rvalue> {
1274 let _icx = push_ctxt("trans_def_datum_unadjusted");
// Direct (non-trait) callables: resolve the fn reference directly.
1277 def::DefFn(did, _) |
1278 def::DefStruct(did) | def::DefVariant(_, did, _) |
1279 def::DefStaticMethod(did, def::FromImpl(_)) |
1280 def::DefMethod(did, _, def::FromImpl(_)) => {
1281 callee::trans_fn_ref(ccx, did, ExprId(ref_expr.id), param_substs)
// Trait-provided methods need trait-based method resolution.
1283 def::DefStaticMethod(impl_did, def::FromTrait(trait_did)) |
1284 def::DefMethod(impl_did, _, def::FromTrait(trait_did)) => {
1285 meth::trans_static_method_callee(ccx, impl_did,
1286 trait_did, ref_expr.id,
// Anything else is a compiler bug at this point.
1290 ccx.tcx().sess.span_bug(ref_expr.span, format!(
1291 "trans_def_fn_unadjusted invoked on: {} for {}",
1293 ref_expr.repr(ccx.tcx()))[]);
1298 /// Translates a reference to a local variable or argument. This always results in an lvalue datum.
///
/// Upvars are looked up in `fcx.llupvars`, locals/args in `fcx.lllocals`;
/// a missing entry in either table is an internal compiler error (`bug`).
// NOTE(review): lines elided in this listing; code kept byte-identical.
1299 pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1301 -> Datum<'tcx, Lvalue> {
1302 let _icx = push_ctxt("trans_local_var");
1305 def::DefUpvar(nid, _, _) => {
1306 // Can't move upvars, so this is never a ZeroMemLastUse.
1307 let local_ty = node_id_type(bcx, nid);
1308 match bcx.fcx.llupvars.borrow().get(&nid) {
1309 Some(&val) => Datum::new(val, local_ty, Lvalue),
1311 bcx.sess().bug(format!(
1312 "trans_local_var: no llval for upvar {} found",
1317 def::DefLocal(nid) => {
1318 let datum = match bcx.fcx.lllocals.borrow().get(&nid) {
1321 bcx.sess().bug(format!(
1322 "trans_local_var: no datum for local/arg {} found",
1326 debug!("take_local(nid={}, v={}, ty={})",
1327 nid, bcx.val_to_string(datum.val), bcx.ty_to_string(datum.ty));
// Any def kind other than upvar/local is unsupported here.
1331 bcx.sess().unimpl(format!(
1332 "unsupported def type in trans_local_var: {}",
1338 /// Helper for enumerating the field types of structs, enums, or records. The optional node ID here
1339 /// is the node ID of the path identifying the enum variant in use. If none, this cannot possibly
1340 /// an enum variant (so, if it is and `node_id_opt` is none, this function panics).
///
/// Invokes `op(discriminant, fields)` with discriminant 0 for structs and
/// tuples, and the variant's actual discriminant for enums (resolved via
/// the def map entry for `node_id_opt`).
// NOTE(review): lines elided in this listing; code kept byte-identical.
1341 pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>,
1343 node_id_opt: Option<ast::NodeId>,
1346 F: FnOnce(ty::Disr, &[ty::field<'tcx>]) -> R,
1349 ty::ty_struct(did, substs) => {
1350 op(0, struct_fields(tcx, did, substs)[])
1353 ty::ty_tup(ref v) => {
1354 op(0, tup_fields(v[])[])
1357 ty::ty_enum(_, substs) => {
1358 // We want the *variant* ID here, not the enum ID.
// Without a node ID we cannot tell which variant is meant — ICE.
1361 tcx.sess.bug(format!(
1362 "cannot get field types from the enum type {} \
1367 let def = tcx.def_map.borrow()[node_id].clone();
1369 def::DefVariant(enum_id, variant_id, _) => {
1370 let variant_info = ty::enum_variant_with_id(
1371 tcx, enum_id, variant_id);
1372 op(variant_info.disr_val,
1378 tcx.sess.bug("resolve didn't map this expr to a \
// Non-struct/tuple/enum types have no fields to enumerate — ICE.
1387 tcx.sess.bug(format!(
1388 "cannot get field types from the type {}",
// Translates a struct literal `S { f: e, .., ..base }` into `dest`.
// Maps each named field to its positional index via `with_field_tys`,
// tracks which fields still need a value from the base expression
// (`need_base`), and delegates the actual construction to `trans_adt`.
// Missing fields without a base expression are a compiler bug by this
// phase (typeck should have rejected them).
// NOTE(review): lines elided in this listing; code kept byte-identical.
1394 fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1395 fields: &[ast::Field],
1396 base: Option<&ast::Expr>,
1397 expr_span: codemap::Span,
1398 expr_id: ast::NodeId,
1400 dest: Dest) -> Block<'blk, 'tcx> {
1401 let _icx = push_ctxt("trans_rec");
1403 let tcx = bcx.tcx();
1404 with_field_tys(tcx, ty, Some(expr_id), |discr, field_tys| {
// Initially every field needs the base; explicit fields clear their slot.
1405 let mut need_base: Vec<_> = repeat(true).take(field_tys.len()).collect();
1407 let numbered_fields = fields.iter().map(|field| {
1409 field_tys.iter().position(|field_ty|
1410 field_ty.name == field.ident.node.name);
1413 need_base[i] = false;
1417 tcx.sess.span_bug(field.span,
1418 "Couldn't find field in struct type")
1421 }).collect::<Vec<_>>();
1422 let optbase = match base {
1423 Some(base_expr) => {
// Collect the (index, type) pairs still to be copied from the base.
1424 let mut leftovers = Vec::new();
1425 for (i, b) in need_base.iter().enumerate() {
1427 leftovers.push((i, field_tys[i].mt.ty))
1430 Some(StructBaseInfo {expr: base_expr,
1431 fields: leftovers })
1434 if need_base.iter().any(|b| *b) {
1435 tcx.sess.span_bug(expr_span, "missing fields and no base expr")
1447 Some(NodeInfo { id: expr_id, span: expr_span }))
1451 /// Information that `trans_adt` needs in order to fill in the fields
1452 /// of a struct copied from a base struct (e.g., from an expression
1453 /// like `Foo { a: b, ..base }`.
1455 /// Note that `fields` may be empty; the base expression must always be
1456 /// evaluated for side-effects.
1457 pub struct StructBaseInfo<'a, 'tcx> {
1458 /// The base expression; will be evaluated after all explicit fields.
1459 expr: &'a ast::Expr,
1460 /// The indices of fields to copy paired with their types.
1461 fields: Vec<(uint, Ty<'tcx>)>
1464 /// Constructs an ADT instance:
1466 /// - `fields` should be a list of field indices paired with the
1467 /// expression to store into that field. The initializers will be
1468 /// evaluated in the order specified by `fields`.
1470 /// - `optbase` contains information on the base struct (if any) from
1471 /// which remaining fields are copied; see comments on `StructBaseInfo`.
///
/// When `dest` is `Ignore` a temporary slot is allocated and dropped at
/// the end. Intermediate field values get a custom cleanup scope so they
/// are dropped if a panic occurs mid-construction; the scope is popped
/// once the whole value is initialized.
// NOTE(review): lines elided in this listing; code kept byte-identical.
1472 pub fn trans_adt<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
1475 fields: &[(uint, &ast::Expr)],
1476 optbase: Option<StructBaseInfo<'a, 'tcx>>,
1478 source_location: Option<NodeInfo>)
1479 -> Block<'blk, 'tcx> {
1480 let _icx = push_ctxt("trans_adt");
1482 let repr = adt::represent_type(bcx.ccx(), ty);
1484 match source_location {
1485 Some(src_loc) => debuginfo::set_source_location(bcx.fcx,
1491 // If we don't care about the result, just make a
1492 // temporary stack slot
1493 let addr = match dest {
1495 Ignore => alloc_ty(bcx, ty, "temp"),
1498 // This scope holds intermediates that must be cleaned should
1499 // panic occur before the ADT as a whole is ready.
1500 let custom_cleanup_scope = fcx.push_custom_cleanup_scope();
1502 // First we trans the base, if we have one, to the dest
1503 for base in optbase.iter() {
// Functional-update syntax only applies to structs (discriminant 0).
1504 assert_eq!(discr, 0);
1506 match ty::expr_kind(bcx.tcx(), &*base.expr) {
// Fast path: base is an rvalue and the type needs no drop glue, so
// we can write it straight into the destination and overwrite fields.
1507 ty::RvalueDpsExpr | ty::RvalueDatumExpr if !type_needs_drop(bcx.tcx(), ty) => {
1508 bcx = trans_into(bcx, &*base.expr, SaveIn(addr));
1510 ty::RvalueStmtExpr => bcx.tcx().sess.bug("unexpected expr kind for struct base expr"),
// Slow path: copy only the leftover fields out of an lvalue base.
1512 let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, &*base.expr, "base"));
1513 for &(i, t) in base.fields.iter() {
1514 let datum = base_datum.get_element(
1515 bcx, t, |srcval| adt::trans_field_ptr(bcx, &*repr, srcval, discr, i));
1516 assert!(type_is_sized(bcx.tcx(), datum.ty));
1517 let dest = adt::trans_field_ptr(bcx, &*repr, addr, discr, i);
1518 bcx = datum.store_to(bcx, dest);
1524 match source_location {
1525 Some(src_loc) => debuginfo::set_source_location(bcx.fcx,
1531 if ty::type_is_simd(bcx.tcx(), ty) {
1532 // This is the constructor of a SIMD type, such types are
1533 // always primitive machine types and so do not have a
1534 // destructor or require any clean-up.
1535 let llty = type_of::type_of(bcx.ccx(), ty);
1537 // keep a vector as a register, and running through the field
1538 // `insertelement`ing them directly into that register
1539 // (i.e. avoid GEPi and `store`s to an alloca) .
1540 let mut vec_val = C_undef(llty);
1542 for &(i, ref e) in fields.iter() {
1543 let block_datum = trans(bcx, &**e);
1544 bcx = block_datum.bcx;
1545 let position = C_uint(bcx.ccx(), i);
1546 let value = block_datum.datum.to_llscalarish(bcx);
1547 vec_val = InsertElement(bcx, vec_val, value, position);
1549 Store(bcx, vec_val, addr);
1551 // Now, we just overwrite the fields we've explicitly specified
1552 for &(i, ref e) in fields.iter() {
1553 let dest = adt::trans_field_ptr(bcx, &*repr, addr, discr, i);
1554 let e_ty = expr_ty_adjusted(bcx, &**e);
1555 bcx = trans_into(bcx, &**e, SaveIn(dest));
// Schedule cleanup for this field in case a later initializer panics.
1556 let scope = cleanup::CustomScope(custom_cleanup_scope);
1557 fcx.schedule_lifetime_end(scope, dest);
1558 fcx.schedule_drop_mem(scope, dest, e_ty);
1562 adt::trans_set_discr(bcx, &*repr, addr, discr);
// Value fully built: the panic-time cleanups are no longer needed.
1564 fcx.pop_custom_cleanup_scope(custom_cleanup_scope);
1566 // If we don't care about the result drop the temporary we made
1570 bcx = glue::drop_ty(bcx, addr, ty, source_location);
1571 base::call_lifetime_end(bcx, addr);
// Translates a scalar literal into an immediate rvalue datum by emitting
// the constant directly (`consts::const_lit`). String literals never come
// through here — they are DPS expressions (see the comment below).
1578 fn trans_immediate_lit<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1581 -> DatumBlock<'blk, 'tcx, Expr> {
1582 // must not be a string constant, that is a RvalueDpsExpr
1583 let _icx = push_ctxt("trans_immediate_lit");
1584 let ty = expr_ty(bcx, expr);
1585 let v = consts::const_lit(bcx.ccx(), expr, lit);
1586 immediate_rvalue_bcx(bcx, v, ty).to_expr_datumblock()
// Translates a unary operator expression (`!`, `-`, `box`, `*`) that
// yields a datum. Overloaded operators other than deref take the DPS
// path instead (asserted below).
// NOTE(review): lines elided in this listing; code kept byte-identical.
1589 fn trans_unary<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1592 sub_expr: &ast::Expr)
1593 -> DatumBlock<'blk, 'tcx, Expr> {
1594 let ccx = bcx.ccx();
1596 let _icx = push_ctxt("trans_unary_datum");
1598 let method_call = MethodCall::expr(expr.id);
1600 // The only overloaded operator that is translated to a datum
1601 // is an overloaded deref, since it is always yields a `&T`.
1602 // Otherwise, we should be in the RvalueDpsExpr path.
1604 op == ast::UnDeref ||
1605 !ccx.tcx().method_map.borrow().contains_key(&method_call));
1607 let un_ty = expr_ty(bcx, expr);
// `!` — logical/bitwise not on the scalar value.
1611 let datum = unpack_datum!(bcx, trans(bcx, sub_expr));
1612 let llresult = Not(bcx, datum.to_llscalarish(bcx));
1613 immediate_rvalue_bcx(bcx, llresult, un_ty).to_expr_datumblock()
// `-` — negation; float vs integer negate chosen below.
1616 let datum = unpack_datum!(bcx, trans(bcx, sub_expr));
1617 let val = datum.to_llscalarish(bcx);
1619 if ty::type_is_fp(un_ty) {
1625 immediate_rvalue_bcx(bcx, llneg, un_ty).to_expr_datumblock()
// `box` — heap-allocate and initialize the contents.
1628 trans_uniq_expr(bcx, un_ty, sub_expr, expr_ty(bcx, sub_expr))
// `*` — dereference once (may be an overloaded deref).
1631 let datum = unpack_datum!(bcx, trans(bcx, sub_expr));
1632 deref_once(bcx, expr, datum, method_call)
// Translates a `box` expression: malloc space for `contents_ty`, translate
// the contents into it, and yield the box pointer as an immediate rvalue.
// A temporary cleanup scope frees the allocation if translating the
// contents panics; it is popped once initialization succeeds.
// NOTE(review): lines elided in this listing; code kept byte-identical.
1637 fn trans_uniq_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1639 contents: &ast::Expr,
1640 contents_ty: Ty<'tcx>)
1641 -> DatumBlock<'blk, 'tcx, Expr> {
1642 let _icx = push_ctxt("trans_uniq_expr");
1644 assert!(type_is_sized(bcx.tcx(), contents_ty));
1645 let llty = type_of::type_of(bcx.ccx(), contents_ty);
1646 let size = llsize_of(bcx.ccx(), llty);
1647 let align = C_uint(bcx.ccx(), type_of::align_of(bcx.ccx(), contents_ty));
1648 let llty_ptr = llty.ptr_to();
1649 let Result { bcx, val } = malloc_raw_dyn(bcx, llty_ptr, box_ty, size, align);
1650 // Unique boxes do not allocate for zero-size types. The standard library
1651 // may assume that `free` is never called on the pointer returned for
1652 // `Box<ZeroSizeType>`.
1653 let bcx = if llsize_of_alloc(bcx.ccx(), llty) == 0 {
// Zero-sized: no allocation happened, so no free must be scheduled.
1654 trans_into(bcx, contents, SaveIn(val))
1656 let custom_cleanup_scope = fcx.push_custom_cleanup_scope();
1657 fcx.schedule_free_value(cleanup::CustomScope(custom_cleanup_scope),
1658 val, cleanup::HeapExchange, contents_ty);
1659 let bcx = trans_into(bcx, contents, SaveIn(val));
1660 fcx.pop_custom_cleanup_scope(custom_cleanup_scope);
1663 immediate_rvalue_bcx(bcx, val, box_ty).to_expr_datumblock()
// Translates `&expr`: forces the subexpression into an lvalue and uses its
// address as the result value. An opened DST value is re-closed into a fat
// pointer (data pointer + length copied into a scratch slot); a sized
// value simply becomes a thin pointer.
// NOTE(review): lines elided in this listing; code kept byte-identical.
1666 fn trans_addr_of<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1668 subexpr: &ast::Expr)
1669 -> DatumBlock<'blk, 'tcx, Expr> {
1670 let _icx = push_ctxt("trans_addr_of");
1672 let sub_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, subexpr, "addr_of"));
1673 match sub_datum.ty.sty {
1675 // Opened DST value, close to a fat pointer
1676 debug!("Closing fat pointer {}", bcx.ty_to_string(sub_datum.ty));
1678 let scratch = rvalue_scratch_datum(bcx,
1679 ty::close_type(bcx.tcx(), sub_datum.ty),
// Copy the data pointer and the length into the scratch fat pointer.
1681 let base = Load(bcx, get_dataptr(bcx, sub_datum.val));
1682 Store(bcx, base, get_dataptr(bcx, scratch.val));
1684 let len = Load(bcx, get_len(bcx, sub_datum.val));
1685 Store(bcx, len, get_len(bcx, scratch.val));
1687 DatumBlock::new(bcx, scratch.to_expr_datum())
1690 // Sized value, ref to a thin pointer
1691 let ty = expr_ty(bcx, expr);
1692 immediate_rvalue_bcx(bcx, sub_datum.val, ty).to_expr_datumblock()
1697 // Important to get types for both lhs and rhs, because one might be _|_
1698 // and the other not.
//
// Translates a non-lazy, non-overloaded binary operator on already-loaded
// scalar (or SIMD) operands, selecting float vs integer vs signed LLVM
// instructions as appropriate. Division and remainder get a runtime
// zero/overflow check for integer types.
// NOTE(review): lines elided in this listing; code kept byte-identical.
1699 fn trans_eager_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1700 binop_expr: &ast::Expr,
1707 -> DatumBlock<'blk, 'tcx, Expr> {
1708 let _icx = push_ctxt("trans_eager_binop");
1710 let tcx = bcx.tcx();
// For SIMD types, classify based on the element type.
1711 let is_simd = ty::type_is_simd(tcx, lhs_t);
1713 if is_simd { ty::simd_type(tcx, lhs_t) }
1716 let is_float = ty::type_is_fp(intype);
1717 let is_signed = ty::type_is_signed(intype);
// Shift RHS may need to be cast to match the LHS width.
1719 let rhs = base::cast_shift_expr_rhs(bcx, op, lhs, rhs);
1722 let val = match op {
1724 if is_float { FAdd(bcx, lhs, rhs) }
1725 else { Add(bcx, lhs, rhs) }
1728 if is_float { FSub(bcx, lhs, rhs) }
1729 else { Sub(bcx, lhs, rhs) }
1732 if is_float { FMul(bcx, lhs, rhs) }
1733 else { Mul(bcx, lhs, rhs) }
1739 // Only zero-check integers; fp /0 is NaN
1740 bcx = base::fail_if_zero_or_overflows(bcx, binop_expr.span,
1741 op, lhs, rhs, rhs_t);
1753 // Only zero-check integers; fp %0 is NaN
1754 bcx = base::fail_if_zero_or_overflows(bcx, binop_expr.span,
1755 op, lhs, rhs, rhs_t);
1763 ast::BiBitOr => Or(bcx, lhs, rhs),
1764 ast::BiBitAnd => And(bcx, lhs, rhs),
1765 ast::BiBitXor => Xor(bcx, lhs, rhs),
1766 ast::BiShl => Shl(bcx, lhs, rhs),
// Right shift: logical for unsigned (arithmetic branch elided above).
1770 } else { LShr(bcx, lhs, rhs) }
1772 ast::BiEq | ast::BiNe | ast::BiLt | ast::BiGe | ast::BiLe | ast::BiGt => {
1773 if ty::type_is_scalar(rhs_t) {
1774 unpack_result!(bcx, base::compare_scalar_types(bcx, lhs, rhs, rhs_t, op))
1776 base::compare_simd_types(bcx, lhs, rhs, intype, ty::simd_size(tcx, lhs_t), op)
1778 bcx.tcx().sess.span_bug(binop_expr.span, "comparison operator unsupported for type")
// Lazy ops (&&, ||) and overloaded ops must not reach this function.
1782 bcx.tcx().sess.span_bug(binop_expr.span, "unexpected binop");
1786 immediate_rvalue_bcx(bcx, val, binop_ty).to_expr_datumblock()
1789 // refinement types would obviate the need for this
// Discriminates `&&` from `||` for `trans_lazy_binop` (variants
// `lazy_and` / `lazy_or` are used below; their declarations are
// elided in this listing).
1790 enum lazy_binop_ty {
// Translates short-circuiting `&&` / `||`. Evaluates the LHS, then branches:
// for `&&` the RHS block runs only when the LHS is true; for `||` only when
// it is false. The result is an i1 phi over the two predecessor blocks.
// NOTE(review): lines elided in this listing; code kept byte-identical.
1795 fn trans_lazy_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1796 binop_expr: &ast::Expr,
1800 -> DatumBlock<'blk, 'tcx, Expr> {
1801 let _icx = push_ctxt("trans_lazy_binop");
1802 let binop_ty = expr_ty(bcx, binop_expr);
1805 let DatumBlock {bcx: past_lhs, datum: lhs} = trans(bcx, a);
1806 let lhs = lhs.to_llscalarish(past_lhs);
// LHS diverged: no need to build the join/rhs blocks at all.
1808 if past_lhs.unreachable.get() {
1809 return immediate_rvalue_bcx(past_lhs, lhs, binop_ty).to_expr_datumblock();
1812 let join = fcx.new_id_block("join", binop_expr.id);
1813 let before_rhs = fcx.new_id_block("before_rhs", b.id);
// `&&`: true → evaluate RHS; `||`: true → short-circuit to join.
1816 lazy_and => CondBr(past_lhs, lhs, before_rhs.llbb, join.llbb),
1817 lazy_or => CondBr(past_lhs, lhs, join.llbb, before_rhs.llbb)
1820 let DatumBlock {bcx: past_rhs, datum: rhs} = trans(before_rhs, b);
1821 let rhs = rhs.to_llscalarish(past_rhs);
// RHS diverged: only the short-circuit edge reaches join, so the
// result is just the LHS value — no phi needed.
1823 if past_rhs.unreachable.get() {
1824 return immediate_rvalue_bcx(join, lhs, binop_ty).to_expr_datumblock();
1827 Br(past_rhs, join.llbb);
1828 let phi = Phi(join, Type::i1(bcx.ccx()), &[lhs, rhs],
1829 &[past_lhs.llbb, past_rhs.llbb]);
1831 return immediate_rvalue_bcx(join, phi, binop_ty).to_expr_datumblock();
// Dispatches a binary expression: `&&`/`||` go to `trans_lazy_binop`,
// everything else loads both operands as scalars and calls
// `trans_eager_binop`. Overloaded operators must not reach here (asserted).
// NOTE(review): lines elided in this listing; code kept byte-identical.
1834 fn trans_binary<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1839 -> DatumBlock<'blk, 'tcx, Expr> {
1840 let _icx = push_ctxt("trans_binary");
1841 let ccx = bcx.ccx();
1843 // if overloaded, would be RvalueDpsExpr
1844 assert!(!ccx.tcx().method_map.borrow().contains_key(&MethodCall::expr(expr.id)));
1848 trans_lazy_binop(bcx, expr, lazy_and, lhs, rhs)
1851 trans_lazy_binop(bcx, expr, lazy_or, lhs, rhs)
// Eager path: evaluate both sides, then emit the operation.
1855 let lhs_datum = unpack_datum!(bcx, trans(bcx, lhs));
1856 let rhs_datum = unpack_datum!(bcx, trans(bcx, rhs));
1857 let binop_ty = expr_ty(bcx, expr);
1859 debug!("trans_binary (expr {}): lhs_datum={}",
1861 lhs_datum.to_string(ccx));
1862 let lhs_ty = lhs_datum.ty;
1863 let lhs = lhs_datum.to_llscalarish(bcx);
1865 debug!("trans_binary (expr {}): rhs_datum={}",
1867 rhs_datum.to_string(ccx));
1868 let rhs_ty = rhs_datum.ty;
1869 let rhs = rhs_datum.to_llscalarish(bcx);
1870 trans_eager_binop(bcx, expr, binop_ty, op,
1871 lhs_ty, lhs, rhs_ty, rhs)
// Translates an overloaded operator (binary op, unary op, index, deref)
// as a method call: looks up the method in the method map and invokes it
// via `callee::trans_call_inner` with `ArgOverloadedOp` argument passing.
// NOTE(review): lines elided in this listing; code kept byte-identical.
1876 fn trans_overloaded_op<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1878 method_call: MethodCall,
1879 lhs: Datum<'tcx, Expr>,
1880 rhs: Vec<(Datum<'tcx, Expr>, ast::NodeId)>,
1883 -> Result<'blk, 'tcx> {
// Typeck guarantees the method map entry exists for an overloaded op.
1884 let method_ty = (*bcx.tcx().method_map.borrow())[method_call].ty;
1885 callee::trans_call_inner(bcx,
1886 Some(expr_info(expr)),
1887 monomorphize_type(bcx, method_ty),
1888 |bcx, arg_cleanup_scope| {
1889 meth::trans_method_callee(bcx,
1894 callee::ArgOverloadedOp(lhs, rhs, autoref),
// Translates `callee(args...)` where the call goes through an overloaded
// call operator (Fn/FnMut/FnOnce-style): the callee expression itself
// becomes the first argument, followed by the explicit arguments.
// NOTE(review): lines elided in this listing; code kept byte-identical.
1898 fn trans_overloaded_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
1900 callee: &'a ast::Expr,
1901 args: &'a [P<ast::Expr>],
1903 -> Block<'blk, 'tcx> {
1904 let method_call = MethodCall::expr(expr.id);
1905 let method_type = (*bcx.tcx()
1907 .borrow())[method_call]
// Prepend the callee as the receiver argument.
1909 let mut all_args = vec!(callee);
1910 all_args.extend(args.iter().map(|e| &**e));
1912 callee::trans_call_inner(bcx,
1913 Some(expr_info(expr)),
1914 monomorphize_type(bcx,
1916 |bcx, arg_cleanup_scope| {
1917 meth::trans_method_callee(
1923 callee::ArgOverloadedCall(all_args),
// Emits an integer-to-integer cast choosing the right LLVM instruction by
// comparing bit widths: same width → bitcast, narrowing → trunc, widening
// → sext or zext depending on the source signedness (the signedness branch
// is elided in this listing).
1928 fn int_cast(bcx: Block,
1934 let _icx = push_ctxt("int_cast");
1936 let srcsz = llvm::LLVMGetIntTypeWidth(llsrctype.to_ref());
1937 let dstsz = llvm::LLVMGetIntTypeWidth(lldsttype.to_ref());
1938 return if dstsz == srcsz {
1939 BitCast(bcx, llsrc, lldsttype)
1940 } else if srcsz > dstsz {
1941 TruncOrBitCast(bcx, llsrc, lldsttype)
1943 SExtOrBitCast(bcx, llsrc, lldsttype)
1945 ZExtOrBitCast(bcx, llsrc, lldsttype)
// Emits a float-to-float cast by comparing float widths: widening → fpext,
// narrowing → fptrunc (the equal-width fallthrough is elided in this
// listing).
1950 fn float_cast(bcx: Block,
1955 let _icx = push_ctxt("float_cast");
1956 let srcsz = llsrctype.float_width();
1957 let dstsz = lldsttype.float_width();
1958 return if dstsz > srcsz {
1959 FPExt(bcx, llsrc, lldsttype)
1960 } else if srcsz > dstsz {
1961 FPTrunc(bcx, llsrc, lldsttype)
// Classification of a type for `as`-cast purposes (variants such as
// cast_integral / cast_float / cast_pointer / cast_enum, used by
// `cast_type_kind` and `trans_imm_cast`, are elided in this listing).
1965 #[derive(Copy, PartialEq, Show)]
1966 pub enum cast_kind {
// Maps a type to its `cast_kind`: char/int/uint/bool → integral,
// floats → float, sized-pointee references/raw pointers and bare fns →
// pointer, enums → enum. (The unsized-pointee branch and the fallthrough
// arm are elided in this listing.)
1974 pub fn cast_type_kind<'tcx>(tcx: &ty::ctxt<'tcx>, t: Ty<'tcx>) -> cast_kind {
1976 ty::ty_char => cast_integral,
1977 ty::ty_float(..) => cast_float,
1978 ty::ty_rptr(_, mt) | ty::ty_ptr(mt) => {
// Only thin (sized-pointee) pointers are castable as plain pointers.
1979 if type_is_sized(tcx, mt.ty) {
1985 ty::ty_bare_fn(..) => cast_pointer,
1986 ty::ty_int(..) => cast_integral,
1987 ty::ty_uint(..) => cast_integral,
1988 ty::ty_bool => cast_integral,
1989 ty::ty_enum(..) => cast_enum,
// Returns true when a cast requires no code — visible here: when both
// sides deref to some pointee (the pointee comparison and the remaining
// arms are elided in this listing).
1994 fn cast_is_noop<'tcx>(t_in: Ty<'tcx>, t_out: Ty<'tcx>) -> bool {
1995 match (ty::deref(t_in, true), ty::deref(t_out, true)) {
1996 (Some(ty::mt{ ty: t_in, .. }), Some(ty::mt{ ty: t_out, .. })) => {
// Translates an `as` cast that produces an immediate value. Classifies
// source and target via `cast_type_kind` and dispatches on the pair:
// int↔int, float↔float, int↔float, int↔pointer, pointer↔pointer, and
// enum→int/float (which first reads the discriminant). No-op casts
// return the operand datum unchanged.
// NOTE(review): lines elided in this listing; code kept byte-identical.
2003 fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2006 -> DatumBlock<'blk, 'tcx, Expr> {
2007 let _icx = push_ctxt("trans_cast");
2009 let ccx = bcx.ccx();
2011 let t_in = expr_ty(bcx, expr);
2012 let t_out = node_id_type(bcx, id);
2013 let k_in = cast_type_kind(bcx.tcx(), t_in);
2014 let k_out = cast_type_kind(bcx.tcx(), t_out);
// Source signedness decides sext-vs-zext and sitofp-vs-uitofp below.
2015 let s_in = k_in == cast_integral && ty::type_is_signed(t_in);
2016 let ll_t_in = type_of::arg_type_of(ccx, t_in);
2017 let ll_t_out = type_of::arg_type_of(ccx, t_out);
2019 // Convert the value to be cast into a ValueRef, either by-ref or
2020 // by-value as appropriate given its type:
2021 let mut datum = unpack_datum!(bcx, trans(bcx, expr));
2023 if cast_is_noop(datum.ty, t_out) {
2025 return DatumBlock::new(bcx, datum);
2028 let newval = match (k_in, k_out) {
2029 (cast_integral, cast_integral) => {
2030 let llexpr = datum.to_llscalarish(bcx);
2031 int_cast(bcx, ll_t_out, ll_t_in, llexpr, s_in)
2033 (cast_float, cast_float) => {
2034 let llexpr = datum.to_llscalarish(bcx);
2035 float_cast(bcx, ll_t_out, ll_t_in, llexpr)
2037 (cast_integral, cast_float) => {
2038 let llexpr = datum.to_llscalarish(bcx);
2040 SIToFP(bcx, llexpr, ll_t_out)
2041 } else { UIToFP(bcx, llexpr, ll_t_out) }
2043 (cast_float, cast_integral) => {
2044 let llexpr = datum.to_llscalarish(bcx);
2045 if ty::type_is_signed(t_out) {
2046 FPToSI(bcx, llexpr, ll_t_out)
2047 } else { FPToUI(bcx, llexpr, ll_t_out) }
2049 (cast_integral, cast_pointer) => {
2050 let llexpr = datum.to_llscalarish(bcx);
2051 IntToPtr(bcx, llexpr, ll_t_out)
2053 (cast_pointer, cast_integral) => {
2054 let llexpr = datum.to_llscalarish(bcx);
2055 PtrToInt(bcx, llexpr, ll_t_out)
2057 (cast_pointer, cast_pointer) => {
2058 let llexpr = datum.to_llscalarish(bcx);
2059 PointerCast(bcx, llexpr, ll_t_out)
2061 (cast_enum, cast_integral) |
2062 (cast_enum, cast_float) => {
// Enum casts: read the discriminant (needs an lvalue), then convert it.
2064 let repr = adt::represent_type(ccx, t_in);
2065 let datum = unpack_datum!(
2066 bcx, datum.to_lvalue_datum(bcx, "trans_imm_cast", expr.id));
2067 let llexpr_ptr = datum.to_llref();
2069 adt::trans_get_discr(bcx, &*repr, llexpr_ptr, Some(Type::i64(ccx)));
2071 cast_integral => int_cast(bcx, ll_t_out,
2072 val_ty(lldiscrim_a),
2074 cast_float => SIToFP(bcx, lldiscrim_a, ll_t_out),
2076 ccx.sess().bug(format!("translating unsupported cast: \
2077 {} ({}) -> {} ({})",
2078 t_in.repr(bcx.tcx()),
2080 t_out.repr(bcx.tcx()),
2085 _ => ccx.sess().bug(format!("translating unsupported cast: \
2086 {} ({}) -> {} ({})",
2087 t_in.repr(bcx.tcx()),
2089 t_out.repr(bcx.tcx()),
2092 return immediate_rvalue_bcx(bcx, newval, t_out).to_expr_datumblock();
// Translates a compound assignment `dst op= src` (non-overloaded only,
// asserted below): evaluates `dst` as an lvalue, loads its current value,
// evaluates `src`, computes `dst op src` eagerly, and stores the result
// back into the destination.
// NOTE(review): lines elided in this listing; code kept byte-identical.
2095 fn trans_assign_op<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2100 -> Block<'blk, 'tcx> {
2101 let _icx = push_ctxt("trans_assign_op");
2104 debug!("trans_assign_op(expr={})", bcx.expr_to_string(expr));
2106 // User-defined operator methods cannot be used with `+=` etc right now
2107 assert!(!bcx.tcx().method_map.borrow().contains_key(&MethodCall::expr(expr.id)));
2109 // Evaluate LHS (destination), which should be an lvalue
2110 let dst_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, dst, "assign_op"));
// Overwriting in place is only sound for types without drop glue.
2111 assert!(!type_needs_drop(bcx.tcx(), dst_datum.ty));
2112 let dst_ty = dst_datum.ty;
2113 let dst = load_ty(bcx, dst_datum.val, dst_datum.ty);
2116 let rhs_datum = unpack_datum!(bcx, trans(bcx, &*src));
2117 let rhs_ty = rhs_datum.ty;
2118 let rhs = rhs_datum.to_llscalarish(bcx);
2120 // Perform computation and store the result
2121 let result_datum = unpack_datum!(
2122 bcx, trans_eager_binop(bcx, expr, dst_datum.ty, op,
2123 dst_ty, dst, rhs_ty, rhs));
2124 return result_datum.store_to(bcx, dst_datum.val);
// Takes a reference to a datum: forces it into an lvalue (scheduling
// cleanup if needed) and wraps its address in a by-value `&'static T`
// datum. The 'static region is a deliberate fiction — regions and
// mutability are irrelevant to trans, per the comment below.
// NOTE(review): lines elided in this listing; code kept byte-identical.
2127 fn auto_ref<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2128 datum: Datum<'tcx, Expr>,
2130 -> DatumBlock<'blk, 'tcx, Expr> {
2133 // Ensure cleanup of `datum` if not already scheduled and obtain
2134 // a "by ref" pointer.
2135 let lv_datum = unpack_datum!(bcx, datum.to_lvalue_datum(bcx, "autoref", expr.id));
2137 // Compute final type. Note that we are loose with the region and
2138 // mutability, since those things don't matter in trans.
2139 let referent_ty = lv_datum.ty;
2140 let ptr_ty = ty::mk_imm_rptr(bcx.tcx(), bcx.tcx().mk_region(ty::ReStatic), referent_ty);
2143 let llref = lv_datum.to_llref();
2145 // Construct the resulting datum, using what was the "by ref"
2146 // ValueRef of type `referent_ty` to be the "by value" ValueRef
2147 // of type `&referent_ty`.
2148 DatumBlock::new(bcx, Datum::new(llref, ptr_ty, RvalueExpr(Rvalue::new(ByValue))))
// Applies `deref_once` `times` times in sequence (used for autoderef
// adjustments); each step gets its own autoderef MethodCall key so
// overloaded derefs resolve per level.
2151 fn deref_multiple<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2153 datum: Datum<'tcx, Expr>,
2155 -> DatumBlock<'blk, 'tcx, Expr> {
2157 let mut datum = datum;
2158 for i in range(0, times) {
2159 let method_call = MethodCall::autoderef(expr.id, i);
2160 datum = unpack_datum!(bcx, deref_once(bcx, expr, datum, method_call));
2162 DatumBlock { bcx: bcx, datum: datum }
// Performs a single dereference of `datum`. If the deref is overloaded
// (present in the method map), calls the user's `deref()` method first to
// obtain a plain `&T`, then falls through to the built-in pointer cases:
// Box<T>, raw pointers, and references — with separate handling for
// unsized (DST) pointees, which become `ty_open` lvalues instead of loads.
// NOTE(review): lines elided in this listing; code kept byte-identical.
2165 fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2167 datum: Datum<'tcx, Expr>,
2168 method_call: MethodCall)
2169 -> DatumBlock<'blk, 'tcx, Expr> {
2170 let ccx = bcx.ccx();
2172 debug!("deref_once(expr={}, datum={}, method_call={})",
2173 expr.repr(bcx.tcx()),
2174 datum.to_string(ccx),
2179 // Check for overloaded deref.
2180 let method_ty = ccx.tcx().method_map.borrow()
2181 .get(&method_call).map(|method| method.ty);
2182 let datum = match method_ty {
2183 Some(method_ty) => {
2184 // Overloaded. Evaluate `trans_overloaded_op`, which will
2185 // invoke the user's deref() method, which basically
2186 // converts from the `Smaht<T>` pointer that we have into
2187 // a `&T` pointer. We can then proceed down the normal
2188 // path (below) to dereference that `&T`.
2189 let datum = match method_call.adjustment {
2190 // Always perform an AutoPtr when applying an overloaded auto-deref
2191 ty::AutoDeref(_) => unpack_datum!(bcx, auto_ref(bcx, datum, expr)),
// The `&T` returned by deref() lands in a scratch slot.
2195 let ref_ty = ty::ty_fn_ret(monomorphize_type(bcx, method_ty)).unwrap();
2196 let scratch = rvalue_scratch_datum(bcx, ref_ty, "overloaded_deref");
2198 unpack_result!(bcx, trans_overloaded_op(bcx, expr, method_call,
2199 datum, Vec::new(), Some(SaveIn(scratch.val)),
2201 scratch.to_expr_datum()
2204 // Not overloaded. We already have a pointer we know how to deref.
2209 let r = match datum.ty.sty {
2210 ty::ty_uniq(content_ty) => {
2211 if type_is_sized(bcx.tcx(), content_ty) {
2212 deref_owned_pointer(bcx, expr, datum, content_ty)
2214 // A fat pointer and an opened DST value have the same
2215 // representation just different types. Since there is no
2216 // temporary for `*e` here (because it is unsized), we cannot
2217 // emulate the sized object code path for running drop glue and
2218 // free. Instead, we schedule cleanup for `e`, turning it into
2220 let datum = unpack_datum!(
2221 bcx, datum.to_lvalue_datum(bcx, "deref", expr.id));
2223 let datum = Datum::new(datum.val, ty::mk_open(bcx.tcx(), content_ty), LvalueExpr);
2224 DatumBlock::new(bcx, datum)
2228 ty::ty_ptr(ty::mt { ty: content_ty, .. }) |
2229 ty::ty_rptr(_, ty::mt { ty: content_ty, .. }) => {
2230 if type_is_sized(bcx.tcx(), content_ty) {
2231 let ptr = datum.to_llscalarish(bcx);
2233 // Always generate an lvalue datum, even if datum.mode is
2234 // an rvalue. This is because datum.mode is only an
2235 // rvalue for non-owning pointers like &T or *T, in which
2236 // case cleanup *is* scheduled elsewhere, by the true
2237 // owner (or, in the case of *T, by the user).
2238 DatumBlock::new(bcx, Datum::new(ptr, content_ty, LvalueExpr))
2240 // A fat pointer and an opened DST value have the same representation
2241 // just different types.
2242 DatumBlock::new(bcx, Datum::new(datum.val,
2243 ty::mk_open(bcx.tcx(), content_ty),
// Any other type cannot be dereferenced — compiler bug by this phase.
2249 bcx.tcx().sess.span_bug(
2251 format!("deref invoked on expr of illegal type {}",
2252 datum.ty.repr(bcx.tcx()))[]);
2256 debug!("deref_once(expr={}, method_call={}, result={})",
2257 expr.id, method_call, r.datum.to_string(ccx));
2261 /// We microoptimize derefs of owned pointers a bit here. Basically, the idea is to make the
2262 /// deref of an rvalue result in an rvalue. This helps to avoid intermediate stack slots in the
2263 /// resulting LLVM. The idea here is that, if the `Box<T>` pointer is an rvalue, then we can
2264 /// schedule a *shallow* free of the `Box<T>` pointer, and then return a ByRef rvalue into the
2265 /// pointer. Because the free is shallow, it is legit to return an rvalue, because we know that
2266 /// the contents are not yet scheduled to be freed. The language rules ensure that the contents
2267 /// will be used (or moved) before the free occurs.
2268 fn deref_owned_pointer<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2270 datum: Datum<'tcx, Expr>,
2271 content_ty: Ty<'tcx>)
2272 -> DatumBlock<'blk, 'tcx, Expr> {
2274 RvalueExpr(Rvalue { mode: ByRef }) => {
2275 let scope = cleanup::temporary_scope(bcx.tcx(), expr.id);
2276 let ptr = Load(bcx, datum.val);
2277 if !type_is_zero_size(bcx.ccx(), content_ty) {
2278 bcx.fcx.schedule_free_value(scope, ptr, cleanup::HeapExchange, content_ty);
2281 RvalueExpr(Rvalue { mode: ByValue }) => {
2282 let scope = cleanup::temporary_scope(bcx.tcx(), expr.id);
2283 if !type_is_zero_size(bcx.ccx(), content_ty) {
2284 bcx.fcx.schedule_free_value(scope, datum.val, cleanup::HeapExchange,
2291 // If we had an rvalue in, we produce an rvalue out.
2292 let (llptr, kind) = match datum.kind {
2294 (Load(bcx, datum.val), LvalueExpr)
2296 RvalueExpr(Rvalue { mode: ByRef }) => {
2297 (Load(bcx, datum.val), RvalueExpr(Rvalue::new(ByRef)))
2299 RvalueExpr(Rvalue { mode: ByValue }) => {
2300 (datum.val, RvalueExpr(Rvalue::new(ByRef)))
2304 let datum = Datum { ty: content_ty, val: llptr, kind: kind };
2305 DatumBlock { bcx: bcx, datum: datum }