1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! # Translation of Expressions
13 //! Public entry points:
15 //! - `trans_into(bcx, expr, dest) -> bcx`: evaluates an expression,
16 //! storing the result into `dest`. This is the preferred form, if you
19 //! - `trans(bcx, expr) -> DatumBlock`: evaluates an expression, yielding
20 //! `Datum` with the result. You can then store the datum, inspect
21 //! the value, etc. This may introduce temporaries if the datum is a
24 //! - `trans_to_lvalue(bcx, expr, "...") -> DatumBlock`: evaluates an
25 //! expression and ensures that the result has a cleanup associated with it,
26 //! creating a temporary stack slot if necessary.
28 //! - `trans_local_var -> Datum`: looks up a local variable or upvar.
30 //! See doc.rs for more comments.
32 #![allow(non_camel_case_types)]
34 pub use self::cast_kind::*;
35 pub use self::Dest::*;
36 use self::lazy_binop_ty::*;
39 use llvm::{self, ValueRef};
41 use middle::mem_categorization::Typer;
42 use middle::subst::{self, Substs};
43 use trans::{_match, adt, asm, base, callee, closure, consts, controlflow};
46 use trans::cleanup::{self, CleanupMethods};
56 use middle::ty::{struct_fields, tup_fields};
57 use middle::ty::{AdjustDerefRef, AdjustReifyFnPointer, AutoUnsafe};
58 use middle::ty::{AutoPtr};
59 use middle::ty::{self, Ty};
60 use middle::ty::MethodCall;
61 use util::common::indenter;
62 use util::ppaux::Repr;
63 use trans::machine::{llsize_of, llsize_of_alloc};
64 use trans::type_::Type;
66 use syntax::{ast, ast_util, codemap};
67 use syntax::print::pprust::{expr_to_string};
69 use syntax::parse::token;
71 use std::iter::repeat;
75 // These are passed around by the code generating functions to track the
76 // destination of a computation's value.
78 #[derive(Copy, PartialEq)]
// Debug-renders a `Dest`: `SaveIn(<llvm value>)` or `Ignore`.
// NOTE(review): the enclosing `enum Dest`/`impl Dest` header and the
// `match *self {` opener are elided in this extraction — confirm against
// the upstream file before editing.
85 pub fn to_string(&self, ccx: &CrateContext) -> String {
87 SaveIn(v) => format!("SaveIn({})", ccx.tn().val_to_string(v)),
88 Ignore => "Ignore".to_string()
93 /// This function is equivalent to `trans(bcx, expr).store_to_dest(dest)` but it may generate
94 /// better optimized LLVM code.
// NOTE(review): interior lines are elided in this extraction (the `expr`/`dest`
// parameters at original 96-97, the `match kind {` opener, closing braces) —
// this text is not compilable as shown; verify against the original file.
95 pub fn trans_into<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
98 -> Block<'blk, 'tcx> {
// If the typechecker recorded adjustments for this expression, fall back to
// the generic `trans` path (which applies them) and store into `dest`.
101 if bcx.tcx().adjustments.borrow().contains_key(&expr.id) {
102 // use trans, which may be less efficient but
103 // which will perform the adjustments:
104 let datum = unpack_datum!(bcx, trans(bcx, expr));
105 return datum.store_to_dest(bcx, dest, expr.id)
108 debug!("trans_into() expr={}", expr.repr(bcx.tcx()));
// Push an AST cleanup scope so temporaries created while evaluating `expr`
// are cleaned up when the scope is popped at the end of this function.
110 let cleanup_debug_loc = debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(),
114 bcx.fcx.push_ast_cleanup_scope(cleanup_debug_loc);
116 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
117 let kind = ty::expr_kind(bcx.tcx(), expr);
// Dispatch on expression kind: datum-producing, DPS-style, or statement-like.
119 ty::LvalueExpr | ty::RvalueDatumExpr => {
120 trans_unadjusted(bcx, expr).store_to_dest(dest, expr.id)
122 ty::RvalueDpsExpr => {
123 trans_rvalue_dps_unadjusted(bcx, expr, dest)
125 ty::RvalueStmtExpr => {
126 trans_rvalue_stmt_unadjusted(bcx, expr)
130 bcx.fcx.pop_and_trans_ast_cleanup_scope(bcx, expr.id)
133 /// Translates an expression, returning a datum (and new block) encapsulating the result. When
134 /// possible, it is preferred to use `trans_into`, as that may avoid creating a temporary on the
// NOTE(review): the rest of this doc comment, the `expr` parameter, and the
// `let mut bcx/fcx` bindings are elided in this extraction.
136 pub fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
138 -> DatumBlock<'blk, 'tcx, Expr> {
139 debug!("trans(expr={})", bcx.expr_to_string(expr));
// Translate unadjusted, then apply recorded adjustments, all inside an AST
// cleanup scope keyed to this expression.
144 let cleanup_debug_loc = debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(),
148 fcx.push_ast_cleanup_scope(cleanup_debug_loc);
149 let datum = unpack_datum!(bcx, trans_unadjusted(bcx, expr));
150 let datum = unpack_datum!(bcx, apply_adjustments(bcx, expr, datum));
151 bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, expr.id);
152 return DatumBlock::new(bcx, datum);
// Returns a pointer (GEP) to the "extra" word of a fat pointer — the length
// for slices / the vtable for trait objects. Closing brace elided here.
155 pub fn get_len(bcx: Block, fat_ptr: ValueRef) -> ValueRef {
156 GEPi(bcx, fat_ptr, &[0u, abi::FAT_PTR_EXTRA])
// Returns a pointer (GEP) to the data-address word of a fat pointer.
// Closing brace elided in this extraction.
159 pub fn get_dataptr(bcx: Block, fat_ptr: ValueRef) -> ValueRef {
160 GEPi(bcx, fat_ptr, &[0u, abi::FAT_PTR_ADDR])
163 /// Helper for trans that apply adjustments from `expr` to `datum`, which should be the unadjusted
164 /// translation of `expr`.
// NOTE(review): several interior lines are elided (the `expr` parameter, the
// `None =>`/`Some(adjustment) =>` arms of the first match, the
// `match adjustment {` opener, and various closing braces).
165 fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
167 datum: Datum<'tcx, Expr>)
168 -> DatumBlock<'blk, 'tcx, Expr> {
170 let mut datum = datum;
// No recorded adjustment for this expr: return the datum unchanged.
171 let adjustment = match bcx.tcx().adjustments.borrow().get(&expr.id).cloned() {
173 return DatumBlock::new(bcx, datum);
177 debug!("unadjusted datum for expr {}: {}, adjustment={}",
178 expr.repr(bcx.tcx()),
179 datum.to_string(bcx.ccx()),
180 adjustment.repr(bcx.tcx()));
182 AdjustReifyFnPointer(_def_id) => {
183 // FIXME(#19925) once fn item types are
184 // zero-sized, we'll need to do something here
// Deref/ref adjustment: compute how many autoderefs to perform and whether
// the trailing autoref still needs to be applied.
186 AdjustDerefRef(ref adj) => {
187 let (autoderefs, use_autoref) = match adj.autoref {
188 // Extracting a value from a box counts as a deref, but if we are
189 // just converting Box<[T, ..n]> to Box<[T]> we aren't really doing
190 // a deref (and wouldn't if we could treat Box like a normal struct).
191 Some(ty::AutoUnsizeUniq(..)) => (adj.autoderefs - 1, true),
192 // We are a bit paranoid about adjustments and thus might have a re-
193 // borrow here which merely derefs and then refs again (it might have
194 // a different region or mutability, but we don't care here. It might
195 // also be just in case we need to unsize. But if there are no nested
196 // adjustments then it should be a no-op).
197 Some(ty::AutoPtr(_, _, None)) if adj.autoderefs == 1 => {
199 // Don't skip a conversion from Box<T> to &T, etc.
201 let method_call = MethodCall::autoderef(expr.id, adj.autoderefs-1);
202 let method = bcx.tcx().method_map.borrow().get(&method_call).is_some();
204 // Don't skip an overloaded deref.
205 (adj.autoderefs, true)
207 (adj.autoderefs - 1, false)
210 _ => (adj.autoderefs, true),
213 _ => (adj.autoderefs, true)
// Perform the computed autoderefs on an lvalue form of the datum.
218 let lval = unpack_datum!(bcx, datum.to_lvalue_datum(bcx, "auto_deref", expr.id));
219 datum = unpack_datum!(
220 bcx, deref_multiple(bcx, expr, lval.to_expr_datum(), autoderefs));
223 // (You might think there is a more elegant way to do this than a
224 // use_autoref bool, but then you remember that the borrow checker exists).
225 if let (true, &Some(ref a)) = (use_autoref, &adj.autoref) {
226 datum = unpack_datum!(bcx, apply_autoref(a,
233 debug!("after adjustments, datum={}", datum.to_string(bcx.ccx()));
234 return DatumBlock::new(bcx, datum);
// Applies a single `ty::AutoRef` adjustment (possibly with a nested inner
// autoref) to `datum`, producing the adjusted datum.
// NOTE(review): the `expr` parameter and several closing braces/arms are
// elided in this extraction.
236 fn apply_autoref<'blk, 'tcx>(autoref: &ty::AutoRef<'tcx>,
237 bcx: Block<'blk, 'tcx>,
239 datum: Datum<'tcx, Expr>)
240 -> DatumBlock<'blk, 'tcx, Expr> {
242 let mut datum = datum;
244 let datum = match autoref {
// AutoPtr/AutoUnsafe: recursively apply any nested autoref, then take a
// reference to the (possibly adjusted) datum.
245 &AutoPtr(_, _, ref a) | &AutoUnsafe(_, ref a) => {
248 &Some(box ref a) => {
249 datum = unpack_datum!(bcx, apply_autoref(a, bcx, expr, datum));
253 unpack_datum!(bcx, ref_ptr(bcx, expr, datum))
255 &ty::AutoUnsize(ref k) => {
256 debug!("  AutoUnsize");
257 unpack_datum!(bcx, unsize_expr(bcx, expr, datum, k))
// Box<[T; n]> -> Box<[T]> gets a dedicated path; other unique unsizings go
// through unsize_unique_expr.
260 &ty::AutoUnsizeUniq(ty::UnsizeLength(len)) => {
261 debug!("  AutoUnsizeUniq(UnsizeLength)");
262 unpack_datum!(bcx, unsize_unique_vec(bcx, expr, datum, len))
264 &ty::AutoUnsizeUniq(ref k) => {
265 debug!("  AutoUnsizeUniq");
266 unpack_datum!(bcx, unsize_unique_expr(bcx, expr, datum, k))
270 DatumBlock::new(bcx, datum)
// Takes the address of `datum`: fat-pointer form for unsized types, a plain
// `auto_ref` for sized ones.
// NOTE(review): the `expr` parameter and the `} else {` / closing lines are
// elided in this extraction.
273 fn ref_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
275 datum: Datum<'tcx, Expr>)
276 -> DatumBlock<'blk, 'tcx, Expr> {
277 debug!("ref_ptr(expr={}, datum={})",
278 expr.repr(bcx.tcx()),
279 datum.to_string(bcx.ccx()));
281 if !type_is_sized(bcx.tcx(), datum.ty) {
282 debug!("Taking address of unsized type {}",
283 bcx.ty_to_string(datum.ty));
284 ref_fat_ptr(bcx, expr, datum)
286 debug!("Taking address of sized type {}",
287 bcx.ty_to_string(datum.ty));
288 auto_ref(bcx, datum, expr)
292 // Retrieve the information we are losing (making dynamic) in an unsizing
294 // When making a dtor, we need to do different things depending on the
295 // ownership of the object.. mk_ty is a function for turning `unadjusted_ty`
296 // into a type to be destructed. If we want to end up with a Box pointer,
297 // then mk_ty should make a Box pointer (T -> Box<T>), if we want a
298 // borrowed reference then it should be T -> &T.
// NOTE(review): the `id` parameter, the `match kind {` opener, and parts of
// the UnsizeVtable arm (trait_ref construction, PointerCast of the vtable)
// are elided in this extraction.
299 fn unsized_info<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
300 kind: &ty::UnsizeKind<'tcx>,
302 unadjusted_ty: Ty<'tcx>,
303 mk_ty: F) -> ValueRef where
304 F: FnOnce(Ty<'tcx>) -> Ty<'tcx>,
306 // FIXME(#19596) workaround: `|t| t` causes monomorphization recursion
307 fn identity<T>(t: T) -> T { t }
309 debug!("unsized_info(kind={:?}, id={}, unadjusted_ty={})",
310 kind, id, unadjusted_ty.repr(bcx.tcx()));
// [T; n] -> [T]: the dynamic info is simply the constant length.
312 &ty::UnsizeLength(len) => C_uint(bcx.ccx(), len),
// Unsizing the last field of a struct: recurse on the field's type.
313 &ty::UnsizeStruct(box ref k, tp_index) => match unadjusted_ty.sty {
314 ty::ty_struct(_, ref substs) => {
315 let ty_substs = substs.types.get_slice(subst::TypeSpace);
316 // The dtor for a field treats it like a value, so mk_ty
317 // should just be the identity function.
318 unsized_info(bcx, k, id, ty_substs[tp_index], identity)
320 _ => bcx.sess().bug(format!("UnsizeStruct with bad sty: {}",
321 bcx.ty_to_string(unadjusted_ty)).index(&FullRange))
// T -> dyn Trait: the dynamic info is the vtable for the trait impl.
323 &ty::UnsizeVtable(ty::TyTrait { ref principal, .. }, _) => {
324 // Note that we preserve binding levels here:
325 let substs = principal.0.substs.with_self_ty(unadjusted_ty).erase_regions();
326 let substs = bcx.tcx().mk_substs(substs);
328 ty::Binder(Rc::new(ty::TraitRef { def_id: principal.def_id(),
330 let trait_ref = bcx.monomorphize(&trait_ref);
331 let box_ty = mk_ty(unadjusted_ty);
333 meth::get_vtable(bcx, box_ty, trait_ref),
334 Type::vtable_ptr(bcx.ccx()))
// Unsizes a (non-unique) expression: builds a fat pointer whose data part is
// a cast of the original value and whose extra part comes from unsized_info.
// NOTE(review): the `expr` parameter, the `tcx` binding, parts of the `info`
// closure body, and the `match *k {` opener are elided in this extraction.
339 fn unsize_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
341 datum: Datum<'tcx, Expr>,
342 k: &ty::UnsizeKind<'tcx>)
343 -> DatumBlock<'blk, 'tcx, Expr> {
345 let datum_ty = datum.ty;
346 let unsized_ty = ty::unsize_ty(tcx, datum_ty, k, expr.span);
347 debug!("unsized_ty={}", unsized_ty.repr(bcx.tcx()));
348 let dest_ty = ty::mk_open(tcx, unsized_ty);
349 debug!("dest_ty={}", unsized_ty.repr(bcx.tcx()));
350 // Closures for extracting and manipulating the data and payload parts of
// `|:` is the pre-1.0 "once" closure syntax.
352 let info = |: bcx, _val| unsized_info(bcx,
357 tcx.mk_region(ty::ReStatic),
360 mutbl: ast::MutImmutable
// Each unsize kind prepares the data pointer differently before packing it
// into the fat pointer via into_fat_ptr.
363 ty::UnsizeStruct(..) =>
364 into_fat_ptr(bcx, expr, datum, dest_ty, |bcx, val| {
365 PointerCast(bcx, val, type_of::type_of(bcx.ccx(), unsized_ty).ptr_to())
367 ty::UnsizeLength(..) =>
368 into_fat_ptr(bcx, expr, datum, dest_ty, |bcx, val| {
369 GEPi(bcx, val, &[0u, 0u])
371 ty::UnsizeVtable(..) =>
372 into_fat_ptr(bcx, expr, datum, dest_ty, |_bcx, val| {
373 PointerCast(bcx, val, Type::i8p(bcx.ccx()))
// Copies an existing fat pointer: loads its data and length words and repacks
// them via into_fat_ptr. NOTE(review): the `expr` parameter and the `tcx`
// binding are elided in this extraction.
378 fn ref_fat_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
380 datum: Datum<'tcx, Expr>)
381 -> DatumBlock<'blk, 'tcx, Expr> {
383 let dest_ty = ty::close_type(tcx, datum.ty);
384 let base = |: bcx, val| Load(bcx, get_dataptr(bcx, val));
385 let len = |: bcx, val| Load(bcx, get_len(bcx, val));
386 into_fat_ptr(bcx, expr, datum, dest_ty, base, len)
// Builds a fat pointer in a scratch slot: `base` computes the data word and
// `info` the extra word, both from the lvalue form of `datum`.
// NOTE(review): the `expr`, `dest_ty`, `base`, and `info` parameter lines are
// partially elided in this extraction.
389 fn into_fat_ptr<'blk, 'tcx, F, G>(bcx: Block<'blk, 'tcx>,
391 datum: Datum<'tcx, Expr>,
395 -> DatumBlock<'blk, 'tcx, Expr> where
396 F: FnOnce(Block<'blk, 'tcx>, ValueRef) -> ValueRef,
397 G: FnOnce(Block<'blk, 'tcx>, ValueRef) -> ValueRef,
// Force an lvalue so we have a stable address to derive both words from.
402 let lval = unpack_datum!(bcx,
403 datum.to_lvalue_datum(bcx, "into_fat_ptr", expr.id));
404 let base = base(bcx, lval.val);
405 let info = info(bcx, lval.val);
407 let scratch = rvalue_scratch_datum(bcx, dest_ty, "__fat_ptr");
408 Store(bcx, base, get_dataptr(bcx, scratch.val));
409 Store(bcx, info, get_len(bcx, scratch.val));
411 DatumBlock::new(bcx, scratch.to_expr_datum())
// Converts Box<[T; len]> into Box<[T]>: moves the box into a fat-pointer
// scratch slot and stores the constant length as the extra word.
// NOTE(review): the `expr` and `len` parameters, `tcx`/`mut bcx` bindings, and
// some interior lines are elided in this extraction.
414 fn unsize_unique_vec<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
416 datum: Datum<'tcx, Expr>,
418 -> DatumBlock<'blk, 'tcx, Expr> {
422 let datum_ty = datum.ty;
424 let lval = unpack_datum!(bcx,
425 datum.to_lvalue_datum(bcx, "unsize_unique_vec", expr.id));
427 let ll_len = C_uint(bcx.ccx(), len);
428 let unit_ty = ty::sequence_element_type(tcx, ty::type_content(datum_ty));
429 let vec_ty = ty::mk_uniq(tcx, ty::mk_vec(tcx, unit_ty, None));
430 let scratch = rvalue_scratch_datum(bcx, vec_ty, "__unsize_unique");
// Store the box pointer into the data word (cast to the original box type).
432 let base = get_dataptr(bcx, scratch.val);
433 let base = PointerCast(bcx,
435 type_of::type_of(bcx.ccx(), datum_ty).ptr_to());
436 bcx = lval.store_to(bcx, base);
438 Store(bcx, ll_len, get_len(bcx, scratch.val));
439 DatumBlock::new(bcx, scratch.to_expr_datum())
// Unsizes a Box<T> (non-array case) into a fat Box: stores the thin box into
// the data word and the vtable/length info (from unsized_info) into the extra
// word. NOTE(review): the `expr`/`k` parameter lines, `tcx`/`mut bcx`
// bindings, and the ty_uniq match arm are partially elided here.
442 fn unsize_unique_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
444 datum: Datum<'tcx, Expr>,
445 k: &ty::UnsizeKind<'tcx>)
446 -> DatumBlock<'blk, 'tcx, Expr> {
450 let datum_ty = datum.ty;
451 let unboxed_ty = match datum_ty.sty {
453 _ => bcx.sess().bug(format!("Expected ty_uniq, found {}",
454 bcx.ty_to_string(datum_ty)).index(&FullRange))
456 let result_ty = ty::mk_uniq(tcx, ty::unsize_ty(tcx, unboxed_ty, k, expr.span));
458 let lval = unpack_datum!(bcx,
459 datum.to_lvalue_datum(bcx, "unsize_unique_expr", expr.id));
461 let scratch = rvalue_scratch_datum(bcx, result_ty, "__uniq_fat_ptr");
462 let llbox_ty = type_of::type_of(bcx.ccx(), datum_ty);
463 let base = PointerCast(bcx, get_dataptr(bcx, scratch.val), llbox_ty.ptr_to());
464 bcx = lval.store_to(bcx, base);
// mk_uniq here makes unsized_info treat the destructed type as a Box.
466 let info = unsized_info(bcx, k, expr.id, unboxed_ty, |t| ty::mk_uniq(tcx, t));
467 Store(bcx, info, get_len(bcx, scratch.val));
469 let scratch = unpack_datum!(bcx,
470 scratch.to_expr_datum().to_lvalue_datum(bcx,
471 "fresh_uniq_fat_ptr",
474 DatumBlock::new(bcx, scratch.to_expr_datum())
478 /// Translates an expression in "lvalue" mode -- meaning that it returns a reference to the memory
479 /// that the expr represents.
481 /// If this expression is an rvalue, this implies introducing a temporary. In other words,
482 /// something like `x().f` is translated into roughly the equivalent of
484 ///    { tmp = x(); tmp.f }
// NOTE(review): the `expr` and `name` parameters are elided in this
// extraction.
485 pub fn trans_to_lvalue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
488 -> DatumBlock<'blk, 'tcx, Lvalue> {
490 let datum = unpack_datum!(bcx, trans(bcx, expr));
491 return datum.to_lvalue_datum(bcx, name, expr.id);
494 /// A version of `trans` that ignores adjustments. You almost certainly do not want to call this
// NOTE(review): the rest of the doc comment, the `expr` parameter, the
// `mut bcx` binding, and several closing braces are elided in this
// extraction.
496 fn trans_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
498 -> DatumBlock<'blk, 'tcx, Expr> {
501 debug!("trans_unadjusted(expr={})", bcx.expr_to_string(expr));
502 let _indenter = indenter();
504 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
506 return match ty::expr_kind(bcx.tcx(), expr) {
507 ty::LvalueExpr | ty::RvalueDatumExpr => {
508 let datum = unpack_datum!(bcx, {
509 trans_datum_unadjusted(bcx, expr)
512 DatumBlock {bcx: bcx, datum: datum}
// Statement expressions produce no value; return a nil datum of the
// expression's type.
515 ty::RvalueStmtExpr => {
516 bcx = trans_rvalue_stmt_unadjusted(bcx, expr);
517 nil(bcx, expr_ty(bcx, expr))
520 ty::RvalueDpsExpr => {
521 let ty = expr_ty(bcx, expr);
522 if type_is_zero_size(bcx.ccx(), ty) {
523 bcx = trans_rvalue_dps_unadjusted(bcx, expr, Ignore);
// Non-zero-sized DPS result: evaluate into a scratch slot, then convert
// to the "appropriate" datum form.
526 let scratch = rvalue_scratch_datum(bcx, ty, "");
527 bcx = trans_rvalue_dps_unadjusted(
528 bcx, expr, SaveIn(scratch.val));
530 // Note: this is not obviously a good idea.  It causes
531 // immediate values to be loaded immediately after a
532 // return from a call or other similar expression,
533 // which in turn leads to alloca's having shorter
534 // lifetimes and hence larger stack frames.  However,
535 // in turn it can lead to more register pressure.
536 // Still, in practice it seems to increase
537 // performance, since we have fewer problems with
539 let scratch = unpack_datum!(
540 bcx, scratch.to_appropriate_datum(bcx));
542 DatumBlock::new(bcx, scratch.to_expr_datum())
// Produces a placeholder datum of type `ty` backed by an LLVM undef value,
// used for statement expressions that yield no meaningful value.
547 fn nil<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ty: Ty<'tcx>)
548 -> DatumBlock<'blk, 'tcx, Expr> {
549 let llval = C_undef(type_of::type_of(bcx.ccx(), ty));
550 let datum = immediate_rvalue(llval, ty);
551 DatumBlock::new(bcx, datum.to_expr_datum())
// Translates datum-kind expressions (paths, field access, indexing, literals,
// unary/binary ops, casts, box expressions) into a Datum.
// NOTE(review): the `expr` parameter, the `match expr.node {` opener, and
// many interior/closing lines are elided in this extraction — the arm
// structure below (e.g. ExprAddrOf flowing into the Repeat/Vec special case)
// is scrambled by the missing lines; verify against the original file.
555 fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
557 -> DatumBlock<'blk, 'tcx, Expr> {
560 let _icx = push_ctxt("trans_datum_unadjusted");
563 ast::ExprParen(ref e) => {
566 ast::ExprPath(_) => {
567 trans_def(bcx, expr, bcx.def(expr.id))
569 ast::ExprField(ref base, ident) => {
570 trans_rec_field(bcx, &**base, ident.node)
572 ast::ExprTupField(ref base, idx) => {
573 trans_rec_tup_field(bcx, &**base, idx.node)
575 ast::ExprIndex(ref base, ref idx) => {
576 trans_index(bcx, expr, &**base, &**idx, MethodCall::expr(expr.id))
578 ast::ExprBox(_, ref contents) => {
579 // Special case for `Box<T>`
580 let box_ty = expr_ty(bcx, expr);
581 let contents_ty = expr_ty(bcx, &**contents);
584 trans_uniq_expr(bcx, box_ty, &**contents, contents_ty)
586 _ => bcx.sess().span_bug(expr.span,
587 "expected unique box")
591 ast::ExprLit(ref lit) => trans_immediate_lit(bcx, expr, &**lit),
592 ast::ExprBinary(op, ref lhs, ref rhs) => {
593 trans_binary(bcx, expr, op, &**lhs, &**rhs)
595 ast::ExprUnary(op, ref x) => {
596 trans_unary(bcx, expr, op, &**x)
598 ast::ExprAddrOf(_, ref x) => {
600 ast::ExprRepeat(..) | ast::ExprVec(..) => {
601 // Special case for slices.
602 let cleanup_debug_loc =
603 debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(),
607 fcx.push_ast_cleanup_scope(cleanup_debug_loc);
608 let datum = unpack_datum!(
609 bcx, tvec::trans_slice_vec(bcx, expr, &**x));
610 bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, x.id);
611 DatumBlock::new(bcx, datum)
614 trans_addr_of(bcx, expr, &**x)
618 ast::ExprCast(ref val, _) => {
619 // Datum output mode means this is a scalar cast:
620 trans_imm_cast(bcx, &**val, expr.id)
623 bcx.tcx().sess.span_bug(
625 format!("trans_rvalue_datum_unadjusted reached \
626 fall-through case: {:?}",
627 expr.node).index(&FullRange));
// Shared implementation for field access (`base.field` and `base.<idx>`):
// `get_idx` maps the field list to a concrete field index.
// NOTE(review): the `base` and `get_idx` parameter lines and several closing
// braces are elided in this extraction.
632 fn trans_field<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
635 -> DatumBlock<'blk, 'tcx, Expr> where
636 F: FnOnce(&'blk ty::ctxt<'tcx>, &[ty::field<'tcx>]) -> uint,
639 let _icx = push_ctxt("trans_rec_field");
641 let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, base, "field"));
642 let bare_ty = ty::unopen_type(base_datum.ty);
643 let repr = adt::represent_type(bcx.ccx(), bare_ty);
644 with_field_tys(bcx.tcx(), bare_ty, None, move |discr, field_tys| {
645 let ix = get_idx(bcx.tcx(), field_tys);
646 let d = base_datum.get_element(
649 |srcval| adt::trans_field_ptr(bcx, &*repr, srcval, discr, ix));
// Unsized fields get repacked into a fat-pointer scratch slot, reusing the
// base datum's length/info word.
651 if type_is_sized(bcx.tcx(), d.ty) {
652 DatumBlock { datum: d.to_expr_datum(), bcx: bcx }
654 let scratch = rvalue_scratch_datum(bcx, ty::mk_open(bcx.tcx(), d.ty), "");
655 Store(bcx, d.val, get_dataptr(bcx, scratch.val));
656 let info = Load(bcx, get_len(bcx, base_datum.val));
657 Store(bcx, info, get_len(bcx, scratch.val));
659 DatumBlock::new(bcx, scratch.to_expr_datum())
666 /// Translates `base.field`.
// NOTE(review): the `base`/`field` parameter lines and the closing brace are
// elided in this extraction.
667 fn trans_rec_field<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
670 -> DatumBlock<'blk, 'tcx, Expr> {
671 trans_field(bcx, base, |tcx, field_tys| ty::field_idx_strict(tcx, field.name, field_tys))
674 /// Translates `base.<idx>`.
// NOTE(review): the `base`/`idx` parameter lines and the closing brace are
// elided in this extraction.
675 fn trans_rec_tup_field<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
678 -> DatumBlock<'blk, 'tcx, Expr> {
679 trans_field(bcx, base, |_, _| idx)
// Translates `base[idx]`, handling both the overloaded Index-trait path and
// the built-in vector/slice path (with bounds checking).
// NOTE(review): many interior lines are elided in this extraction — the
// `base`/`idx` parameters, `ccx`/`mut bcx` bindings, match-arm openers
// (`Some(method_ty) =>` / `None =>`), and several closing braces. Do not
// treat this as compilable.
682 fn trans_index<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
683 index_expr: &ast::Expr,
686 method_call: MethodCall)
687 -> DatumBlock<'blk, 'tcx, Expr> {
688 //! Translates `base[idx]`.
690 let _icx = push_ctxt("trans_index");
694 // Check for overloaded index.
695 let method_ty = ccx.tcx()
699 .map(|method| method.ty);
700 let elt_datum = match method_ty {
702 let method_ty = monomorphize_type(bcx, method_ty);
704 let base_datum = unpack_datum!(bcx, trans(bcx, base));
706 // Translate index expression.
707 let ix_datum = unpack_datum!(bcx, trans(bcx, idx));
709 let ref_ty = // invoked methods have LB regions instantiated:
710 ty::assert_no_late_bound_regions(
711 bcx.tcx(), &ty::ty_fn_ret(method_ty)).unwrap();
712 let elt_ty = match ty::deref(ref_ty, true) {
714 bcx.tcx().sess.span_bug(index_expr.span,
715 "index method didn't return a \
716 dereferenceable type?!")
718 Some(elt_tm) => elt_tm.ty,
721 // Overloaded. Evaluate `trans_overloaded_op`, which will
722 // invoke the user's index() method, which basically yields
723 // a `&T` pointer.  We can then proceed down the normal
724 // path (below) to dereference that `&T`.
725 let scratch = rvalue_scratch_datum(bcx, ref_ty, "overloaded_index_elt");
727 trans_overloaded_op(bcx,
731 vec![(ix_datum, idx.id)],
732 Some(SaveIn(scratch.val)),
734 let datum = scratch.to_expr_datum();
735 if type_is_sized(bcx.tcx(), elt_ty) {
736 Datum::new(datum.to_llscalarish(bcx), elt_ty, LvalueExpr)
738 Datum::new(datum.val, ty::mk_open(bcx.tcx(), elt_ty), LvalueExpr)
// Built-in indexing path: evaluate base as an lvalue and index directly.
742 let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx,
746 // Translate index expression and cast to a suitable LLVM integer.
747 // Rust is less strict than LLVM in this regard.
748 let ix_datum = unpack_datum!(bcx, trans(bcx, idx));
749 let ix_val = ix_datum.to_llscalarish(bcx);
750 let ix_size = machine::llbitsize_of_real(bcx.ccx(),
752 let int_size = machine::llbitsize_of_real(bcx.ccx(),
// Widen (sign- or zero-extend per the index's signedness) or truncate the
// index to the target's int width.
755 if ix_size < int_size {
756 if ty::type_is_signed(expr_ty(bcx, idx)) {
757 SExt(bcx, ix_val, ccx.int_type())
758 } else { ZExt(bcx, ix_val, ccx.int_type()) }
759 } else if ix_size > int_size {
760 Trunc(bcx, ix_val, ccx.int_type())
768 ty::sequence_element_type(bcx.tcx(),
770 base::maybe_name_value(bcx.ccx(), vt.llunit_size, "unit_sz");
772 let (base, len) = base_datum.get_vec_base_and_len(bcx);
774 debug!("trans_index: base {}", bcx.val_to_string(base));
775 debug!("trans_index: len {}", bcx.val_to_string(len));
// Emit the bounds check (hinted unlikely via llvm.expect) and the failure
// branch before computing the element pointer.
777 let bounds_check = ICmp(bcx, llvm::IntUGE, ix_val, len);
778 let expect = ccx.get_intrinsic(&("llvm.expect.i1"));
779 let expected = Call(bcx,
781 &[bounds_check, C_bool(ccx, false)],
783 bcx = with_cond(bcx, expected, |bcx| {
784 controlflow::trans_fail_bounds_check(bcx,
789 let elt = InBoundsGEP(bcx, base, &[ix_val]);
790 let elt = PointerCast(bcx, elt, vt.llunit_ty.ptr_to());
791 Datum::new(elt, vt.unit_ty, LvalueExpr)
795 DatumBlock::new(bcx, elt_datum)
// Translates a path reference (`def`) to a datum: fn/variant/struct
// constructors, statics, constants, and local variables.
// NOTE(review): the `def` parameter, the `match def {` opener, and several
// interior/closing lines are elided in this extraction.
798 fn trans_def<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
799 ref_expr: &ast::Expr,
801 -> DatumBlock<'blk, 'tcx, Expr> {
802 //! Translates a reference to a path.
804 let _icx = push_ctxt("trans_def_lvalue");
806 def::DefFn(..) | def::DefStaticMethod(..) | def::DefMethod(..) |
807 def::DefStruct(_) | def::DefVariant(..) => {
808 let datum = trans_def_fn_unadjusted(bcx.ccx(), ref_expr, def,
809 bcx.fcx.param_substs);
810 DatumBlock::new(bcx, datum.to_expr_datum())
812 def::DefStatic(did, _) => {
813 // There are two things that may happen here:
814 //  1) If the static item is defined in this crate, it will be
815 //     translated using `get_item_val`, and we return a pointer to
817 //  2) If the static item is defined in another crate then we add
818 //     (or reuse) a declaration of an external global, and return a
820 let const_ty = expr_ty(bcx, ref_expr);
// Local fn: resolves a static's DefId to its LLVM global value.
822 fn get_val<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, did: ast::DefId,
823 const_ty: Ty<'tcx>) -> ValueRef {
824 // For external constants, we don't inline.
825 if did.krate == ast::LOCAL_CRATE {
828 // The LLVM global has the type of its initializer,
829 // which may not be equal to the enum's type for
831 let val = base::get_item_val(bcx.ccx(), did.node);
832 let pty = type_of::type_of(bcx.ccx(), const_ty).ptr_to();
833 PointerCast(bcx, val, pty)
836 base::get_extern_const(bcx.ccx(), did, const_ty)
839 let val = get_val(bcx, did, const_ty);
840 DatumBlock::new(bcx, Datum::new(val, const_ty, LvalueExpr))
842 def::DefConst(did) => {
843 // First, inline any external constants into the local crate so we
844 // can be sure to get the LLVM value corresponding to it.
845 let did = inline::maybe_instantiate_inline(bcx.ccx(), did);
846 if did.krate != ast::LOCAL_CRATE {
847 bcx.tcx().sess.span_bug(ref_expr.span,
848 "cross crate constant could not \
851 let val = base::get_item_val(bcx.ccx(), did.node);
853 // Next, we need to crate a ByRef rvalue datum to return. We can't
854 // use the normal .to_ref_datum() function because the type of
855 // `val` is not actually the same as `const_ty`.
857 // To get around this, we make a custom alloca slot with the
858 // appropriate type (const_ty), and then we cast it to a pointer of
859 // typeof(val), store the value, and then hand this slot over to
860 // the datum infrastructure.
861 let const_ty = expr_ty(bcx, ref_expr);
862 let llty = type_of::type_of(bcx.ccx(), const_ty);
863 let slot = alloca(bcx, llty, "const");
864 let pty = Type::from_ref(unsafe { llvm::LLVMTypeOf(val) }).ptr_to();
865 Store(bcx, val, PointerCast(bcx, slot, pty));
867 let datum = Datum::new(slot, const_ty, Rvalue::new(ByRef));
868 DatumBlock::new(bcx, datum.to_expr_datum())
// Fallthrough: locals and upvars via trans_local_var.
871 DatumBlock::new(bcx, trans_local_var(bcx, def).to_expr_datum())
// Translates statement-kind expressions (break/continue/return, loops,
// assignments, inline asm) — these produce no value, only a new block.
// NOTE(review): the `expr` parameter, `mut bcx` binding, the
// `match expr.node {` opener, and several interior/closing lines are elided
// in this extraction.
876 fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
878 -> Block<'blk, 'tcx> {
880 let _icx = push_ctxt("trans_rvalue_stmt");
// Nothing to do if the current block is already unreachable.
882 if bcx.unreachable.get() {
886 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
889 ast::ExprParen(ref e) => {
890 trans_into(bcx, &**e, Ignore)
892 ast::ExprBreak(label_opt) => {
893 controlflow::trans_break(bcx, expr.id, label_opt)
895 ast::ExprAgain(label_opt) => {
896 controlflow::trans_cont(bcx, expr.id, label_opt)
898 ast::ExprRet(ref ex) => {
899 // Check to see if the return expression itself is reachable.
900 // This can occur when the inner expression contains a return
901 let reachable = if let Some(ref cfg) = bcx.fcx.cfg {
902 cfg.node_is_reachable(expr.id)
908 controlflow::trans_ret(bcx, ex.as_ref().map(|e| &**e))
910 // If it's not reachable, just translate the inner expression
911 // directly. This avoids having to manage a return slot when
912 // it won't actually be used anyway.
913 if let &Some(ref x) = ex {
914 bcx = trans_into(bcx, &**x, Ignore);
916 // Mark the end of the block as unreachable. Once we get to
917 // a return expression, there's no more we should be doing
923 ast::ExprWhile(ref cond, ref body, _) => {
924 controlflow::trans_while(bcx, expr.id, &**cond, &**body)
926 ast::ExprForLoop(ref pat, ref head, ref body, _) => {
927 controlflow::trans_for(bcx,
933 ast::ExprLoop(ref body, _) => {
934 controlflow::trans_loop(bcx, expr.id, &**body)
936 ast::ExprAssign(ref dst, ref src) => {
937 let src_datum = unpack_datum!(bcx, trans(bcx, &**src));
938 let dst_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, &**dst, "assign"));
940 if type_needs_drop(bcx.tcx(), dst_datum.ty) {
941 // If there are destructors involved, make sure we
942 // are copying from an rvalue, since that cannot possible
943 // alias an lvalue. We are concerned about code like:
951 // where e.g. a : Option<Foo> and a.b :
952 // Option<Foo>. In that case, freeing `a` before the
953 // assignment may also free `a.b`!
955 // We could avoid this intermediary with some analysis
956 // to determine whether `dst` may possibly own `src`.
957 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
958 let src_datum = unpack_datum!(
959 bcx, src_datum.to_rvalue_datum(bcx, "ExprAssign"));
960 bcx = glue::drop_ty(bcx,
963 Some(NodeInfo { id: expr.id, span: expr.span }));
964 src_datum.store_to(bcx, dst_datum.val)
966 src_datum.store_to(bcx, dst_datum.val)
969 ast::ExprAssignOp(op, ref dst, ref src) => {
970 trans_assign_op(bcx, expr, op, &**dst, &**src)
972 ast::ExprInlineAsm(ref a) => {
973 asm::trans_inline_asm(bcx, a)
976 bcx.tcx().sess.span_bug(
978 format!("trans_rvalue_stmt_unadjusted reached \
979 fall-through case: {:?}",
980 expr.node).index(&FullRange));
// Translates DPS-kind expressions (if/match/block, struct and tuple
// literals, ranges, closures, calls, overloaded operators, trait casts)
// directly into the destination `dest`.
// NOTE(review): the `expr`/`dest` parameters, `tcx` binding, the
// `match expr.node {` opener, and many interior/closing lines are elided in
// this extraction — do not treat this as compilable.
985 fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
988 -> Block<'blk, 'tcx> {
989 let _icx = push_ctxt("trans_rvalue_dps_unadjusted");
993 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
996 ast::ExprParen(ref e) => {
997 trans_into(bcx, &**e, dest)
999 ast::ExprPath(_) => {
1000 trans_def_dps_unadjusted(bcx, expr, bcx.def(expr.id), dest)
1002 ast::ExprIf(ref cond, ref thn, ref els) => {
1003 controlflow::trans_if(bcx, expr.id, &**cond, &**thn, els.as_ref().map(|e| &**e), dest)
1005 ast::ExprMatch(ref discr, ref arms, _) => {
1006 _match::trans_match(bcx, expr, &**discr, arms.index(&FullRange), dest)
1008 ast::ExprBlock(ref blk) => {
1009 controlflow::trans_block(bcx, &**blk, dest)
1011 ast::ExprStruct(_, ref fields, ref base) => {
1013 fields.index(&FullRange),
1014 base.as_ref().map(|e| &**e),
1017 node_id_type(bcx, expr.id),
1020 ast::ExprRange(ref start, ref end) => {
1021 // FIXME it is just not right that we are synthesising ast nodes in
// Local helper: builds a synthetic struct-field AST node for desugaring.
1023 fn make_field(field_name: &str, expr: P<ast::Expr>) -> ast::Field {
1025 ident: codemap::dummy_spanned(token::str_to_ident(field_name)),
1027 span: codemap::DUMMY_SP,
1031 // A range just desugars into a struct.
1032 // Note that the type of the start and end may not be the same, but
1033 // they should only differ in their lifetime, which should not matter
// Pick the lang-item struct matching which bounds are present:
// Range, RangeFrom, RangeTo, or FullRange.
1035 let (did, fields, ty_params) = match (start, end) {
1036 (&Some(ref start), &Some(ref end)) => {
1038 let fields = vec![make_field("start", start.clone()),
1039 make_field("end", end.clone())];
1040 (tcx.lang_items.range_struct(), fields, vec![node_id_type(bcx, start.id)])
1042 (&Some(ref start), &None) => {
1043 // Desugar to RangeFrom
1044 let fields = vec![make_field("start", start.clone())];
1045 (tcx.lang_items.range_from_struct(), fields, vec![node_id_type(bcx, start.id)])
1047 (&None, &Some(ref end)) => {
1048 // Desugar to RangeTo
1049 let fields = vec![make_field("end", end.clone())];
1050 (tcx.lang_items.range_to_struct(), fields, vec![node_id_type(bcx, end.id)])
1053 // Desugar to FullRange
1054 (tcx.lang_items.full_range_struct(), vec![], vec![])
1058 if let Some(did) = did {
1059 let substs = Substs::new_type(ty_params, vec![]);
1065 ty::mk_struct(tcx, did, tcx.mk_substs(substs)),
1068 tcx.sess.span_bug(expr.span,
1069 "No lang item for ranges (how did we get this far?)")
1072 ast::ExprTup(ref args) => {
1073 let numbered_fields: Vec<(uint, &ast::Expr)> =
1074 args.iter().enumerate().map(|(i, arg)| (i, &**arg)).collect();
1078 numbered_fields.index(&FullRange),
1081 Some(NodeInfo { id: expr.id, span: expr.span }))
1083 ast::ExprLit(ref lit) => {
1085 ast::LitStr(ref s, _) => {
1086 tvec::trans_lit_str(bcx, expr, (*s).clone(), dest)
1091 .span_bug(expr.span,
1092 "trans_rvalue_dps_unadjusted shouldn't be \
1093 translating this type of literal")
1097 ast::ExprVec(..) | ast::ExprRepeat(..) => {
1098 tvec::trans_fixed_vstore(bcx, expr, dest)
1100 ast::ExprClosure(_, _, ref decl, ref body) => {
1101 // Check the side-table to see whether this is an unboxed
1102 // closure or an older, legacy style closure. Store this
1103 // into a variable to ensure the the RefCell-lock is
1104 // released before we recurse.
1105 let is_unboxed_closure =
1106 bcx.tcx().unboxed_closures.borrow().contains_key(&ast_util::local_def(expr.id));
1107 if is_unboxed_closure {
1108 closure::trans_unboxed_closure(bcx, &**decl, &**body, expr.id, dest)
1110 let expr_ty = expr_ty(bcx, expr);
1111 let store = ty::ty_closure_store(expr_ty);
1112 debug!("translating block function {} with type {}",
1113 expr_to_string(expr), expr_ty.repr(tcx));
1114 closure::trans_expr_fn(bcx, store, &**decl, &**body, expr.id, dest)
1117 ast::ExprCall(ref f, ref args) => {
1118 if bcx.tcx().is_method_call(expr.id) {
1119 trans_overloaded_call(bcx,
1122 args.index(&FullRange),
1125 callee::trans_call(bcx,
1128 callee::ArgExprs(args.index(&FullRange)),
1132 ast::ExprMethodCall(_, _, ref args) => {
1133 callee::trans_method_call(bcx,
1136 callee::ArgExprs(args.index(&FullRange)),
1139 ast::ExprBinary(op, ref lhs, ref rhs) => {
1140 // if not overloaded, would be RvalueDatumExpr
1141 let lhs = unpack_datum!(bcx, trans(bcx, &**lhs));
1142 let rhs_datum = unpack_datum!(bcx, trans(bcx, &**rhs));
1143 trans_overloaded_op(bcx, expr, MethodCall::expr(expr.id), lhs,
1144 vec![(rhs_datum, rhs.id)], Some(dest),
1145 !ast_util::is_by_value_binop(op)).bcx
1147 ast::ExprUnary(op, ref subexpr) => {
1148 // if not overloaded, would be RvalueDatumExpr
1149 let arg = unpack_datum!(bcx, trans(bcx, &**subexpr));
1150 trans_overloaded_op(bcx, expr, MethodCall::expr(expr.id),
1151 arg, Vec::new(), Some(dest), !ast_util::is_by_value_unop(op)).bcx
1153 ast::ExprIndex(ref base, ref idx) => {
1154 // if not overloaded, would be RvalueDatumExpr
1155 let base = unpack_datum!(bcx, trans(bcx, &**base));
1156 let idx_datum = unpack_datum!(bcx, trans(bcx, &**idx));
1157 trans_overloaded_op(bcx, expr, MethodCall::expr(expr.id), base,
1158 vec![(idx_datum, idx.id)], Some(dest), true).bcx
1160 ast::ExprCast(ref val, _) => {
1161 // DPS output mode means this is a trait cast:
1162 if ty::type_is_trait(node_id_type(bcx, expr.id)) {
1164 bcx.tcx().object_cast_map.borrow()
1166 .map(|t| (*t).clone())
1168 let trait_ref = bcx.monomorphize(&trait_ref);
1169 let datum = unpack_datum!(bcx, trans(bcx, &**val));
1170 meth::trans_trait_cast(bcx, datum, expr.id,
1173 bcx.tcx().sess.span_bug(expr.span,
1174 "expr_cast of non-trait");
1177 ast::ExprAssignOp(op, ref dst, ref src) => {
1178 trans_assign_op(bcx, expr, op, &**dst, &**src)
1181 bcx.tcx().sess.span_bug(
1183 format!("trans_rvalue_dps_unadjusted reached fall-through \
1185 expr.node).index(&FullRange));
// Translates a path expression that resolves to a definition (`def`) in
// DPS (destination-passing-style) mode, storing the result into `dest`.
// NOTE(review): fragmented extraction — interior lines are elided between
// the numbered fragments; comments only, no tokens changed.
1190 fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1191                                         ref_expr: &ast::Expr,
1194                                         -> Block<'blk, 'tcx> {
1195     let _icx = push_ctxt("trans_def_dps_unadjusted");
// If the caller ignores the result there is nothing to materialize.
1197     let lldest = match dest {
1198         SaveIn(lldest) => lldest,
1199         Ignore => { return bcx; }
// Enum variant used as a value: with arguments it denotes the constructor
// function (store its fn pointer); a nullary variant is materialized by
// setting the discriminant in place.
1203         def::DefVariant(tid, vid, _) => {
1204             let variant_info = ty::enum_variant_with_id(bcx.tcx(), tid, vid);
1205             if variant_info.args.len() > 0u {
1207                 let llfn = callee::trans_fn_ref(bcx.ccx(), vid,
1208                                                 ExprId(ref_expr.id),
1209                                                 bcx.fcx.param_substs).val;
1210                 Store(bcx, llfn, lldest);
1214                 let ty = expr_ty(bcx, ref_expr);
1215                 let repr = adt::represent_type(bcx.ccx(), ty);
1216                 adt::trans_set_discr(bcx, &*repr, lldest,
1217                                      variant_info.disr_val);
// Unit struct used as a value: only structs with a dtor carry a drop flag /
// discriminant that must be initialized here.
1221         def::DefStruct(_) => {
1222             let ty = expr_ty(bcx, ref_expr);
1224                 ty::ty_struct(did, _) if ty::has_dtor(bcx.tcx(), did) => {
1225                     let repr = adt::represent_type(bcx.ccx(), ty);
1226                     adt::trans_set_discr(bcx, &*repr, lldest, 0);
// Any other def reaching this DPS path is a compiler bug.
1233             bcx.tcx().sess.span_bug(ref_expr.span, format!(
1234                 "Non-DPS def {:?} referened by {}",
1235                 def, bcx.node_id_to_string(ref_expr.id)).index(&FullRange));
// Translates a path that names a callable item (fn, tuple-struct/variant
// constructor, or method) into an rvalue datum holding its fn pointer.
// NOTE(review): fragmented extraction — interior lines are elided.
1240 pub fn trans_def_fn_unadjusted<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
1241                                          ref_expr: &ast::Expr,
1243                                          param_substs: &subst::Substs<'tcx>)
1244                                          -> Datum<'tcx, Rvalue> {
1245     let _icx = push_ctxt("trans_def_datum_unadjusted");
// Plain functions, constructors, and inherent-impl methods resolve
// directly through trans_fn_ref.
1248         def::DefFn(did, _) |
1249         def::DefStruct(did) | def::DefVariant(_, did, _) |
1250         def::DefStaticMethod(did, def::FromImpl(_)) |
1251         def::DefMethod(did, _, def::FromImpl(_)) => {
1252             callee::trans_fn_ref(ccx, did, ExprId(ref_expr.id), param_substs)
// Trait-dispatched static methods need trait-aware resolution.
1254         def::DefStaticMethod(impl_did, def::FromTrait(trait_did)) |
1255         def::DefMethod(impl_did, _, def::FromTrait(trait_did)) => {
1256             meth::trans_static_method_callee(ccx, impl_did,
1257                                              trait_did, ref_expr.id,
// Anything else is not a function-valued def; compiler bug.
1261             ccx.tcx().sess.span_bug(ref_expr.span, format!(
1262                 "trans_def_fn_unadjusted invoked on: {:?} for {}",
1264                 ref_expr.repr(ccx.tcx())).index(&FullRange));
1269 /// Translates a reference to a local variable or argument. This always results in an lvalue datum.
// NOTE(review): fragmented extraction — interior lines are elided.
1270 pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1272                                    -> Datum<'tcx, Lvalue> {
1273     let _icx = push_ctxt("trans_local_var");
// Upvars (captured variables) live in the fcx's llupvars side table.
1276         def::DefUpvar(nid, _, _) => {
1277             // Can't move upvars, so this is never a ZeroMemLastUse.
1278             let local_ty = node_id_type(bcx, nid);
1279             match bcx.fcx.llupvars.borrow().get(&nid) {
1280                 Some(&val) => Datum::new(val, local_ty, Lvalue),
// Missing entry means earlier phases failed to record the capture: bug.
1282                     bcx.sess().bug(format!(
1283                         "trans_local_var: no llval for upvar {} found",
1284                         nid).index(&FullRange));
// Ordinary locals/arguments are looked up in lllocals.
1288         def::DefLocal(nid) => {
1289             let datum = match bcx.fcx.lllocals.borrow().get(&nid) {
1292                     bcx.sess().bug(format!(
1293                         "trans_local_var: no datum for local/arg {} found",
1294                         nid).index(&FullRange));
1297             debug!("take_local(nid={}, v={}, ty={})",
1298                    nid, bcx.val_to_string(datum.val), bcx.ty_to_string(datum.ty));
// Any other def kind is unsupported here.
1302             bcx.sess().unimpl(format!(
1303                 "unsupported def type in trans_local_var: {:?}",
1304                 def).index(&FullRange));
1309 /// Helper for enumerating the field types of structs, enums, or records. The optional node ID here
1310 /// is the node ID of the path identifying the enum variant in use. If none, this cannot possibly
1311 /// an enum variant (so, if it is and `node_id_opt` is none, this function panics).
// Invokes `op` with the discriminant and field slice for `ty`.
// NOTE(review): fragmented extraction — interior lines are elided.
1312 pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>,
1314                                   node_id_opt: Option<ast::NodeId>,
1317     F: FnOnce(ty::Disr, &[ty::field<'tcx>]) -> R,
// Structs and tuples always use discriminant 0.
1320         ty::ty_struct(did, substs) => {
1321             op(0, struct_fields(tcx, did, substs).index(&FullRange))
1324         ty::ty_tup(ref v) => {
1325             op(0, tup_fields(v.index(&FullRange)).index(&FullRange))
1328         ty::ty_enum(_, substs) => {
1329             // We want the *variant* ID here, not the enum ID.
// Without a node id we cannot know which variant's fields to enumerate.
1332                     tcx.sess.bug(format!(
1333                         "cannot get field types from the enum type {} \
1335                         ty.repr(tcx)).index(&FullRange));
// Resolve the path node to its variant and use that variant's
// discriminant and fields.
1338                     let def = tcx.def_map.borrow()[node_id].clone();
1340                         def::DefVariant(enum_id, variant_id, _) => {
1341                             let variant_info = ty::enum_variant_with_id(
1342                                 tcx, enum_id, variant_id);
1343                             op(variant_info.disr_val,
1346                                    substs).index(&FullRange))
1349                             tcx.sess.bug("resolve didn't map this expr to a \
// All other types have no fields to enumerate: bug.
1358             tcx.sess.bug(format!(
1359                 "cannot get field types from the type {}",
1360                 ty.repr(tcx)).index(&FullRange));
// Translates a struct literal `S { f: e, .., ..base }` into `dest` by
// mapping named fields to positional indices and delegating to trans_adt.
// NOTE(review): fragmented extraction — interior lines are elided.
1365 fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1366                             fields: &[ast::Field],
1367                             base: Option<&ast::Expr>,
1368                             expr_span: codemap::Span,
1369                             expr_id: ast::NodeId,
1371                             dest: Dest) -> Block<'blk, 'tcx> {
1372     let _icx = push_ctxt("trans_rec");
1374     let tcx = bcx.tcx();
1375     with_field_tys(tcx, ty, Some(expr_id), |discr, field_tys| {
// need_base[i] is true until field i is explicitly initialized; leftovers
// must then come from the `..base` expression.
1376         let mut need_base: Vec<bool> = repeat(true).take(field_tys.len()).collect();
1378         let numbered_fields = fields.iter().map(|field| {
1380             field_tys.iter().position(|field_ty|
1381                                       field_ty.name == field.ident.node.name);
1382             let result = match opt_pos {
1384                     need_base[i] = false;
// Typeck should have rejected unknown field names already.
1388                     tcx.sess.span_bug(field.span,
1389                                       "Couldn't find field in struct type")
1393         }).collect::<Vec<_>>();
// Collect the (index, type) pairs still needing values from `base`.
1394         let optbase = match base {
1395             Some(base_expr) => {
1396                 let mut leftovers = Vec::new();
1397                 for (i, b) in need_base.iter().enumerate() {
1399                     leftovers.push((i, field_tys[i].mt.ty));
1402                 Some(StructBaseInfo {expr: base_expr,
1403                                      fields: leftovers })
// No base expression: every field must have been listed explicitly.
1406                 if need_base.iter().any(|b| *b) {
1407                     tcx.sess.span_bug(expr_span, "missing fields and no base expr")
1416                   numbered_fields.as_slice(),
1419                   Some(NodeInfo { id: expr_id, span: expr_span }))
1423 /// Information that `trans_adt` needs in order to fill in the fields
1424 /// of a struct copied from a base struct (e.g., from an expression
1425 /// like `Foo { a: b, ..base }`.
1427 /// Note that `fields` may be empty; the base expression must always be
1428 /// evaluated for side-effects.
1429 pub struct StructBaseInfo<'a, 'tcx> {
1430     /// The base expression; will be evaluated after all explicit fields.
1431     expr: &'a ast::Expr,
1432     /// The indices of fields to copy paired with their types.
1433     fields: Vec<(uint, Ty<'tcx>)>
1436 /// Constructs an ADT instance:
1438 /// - `fields` should be a list of field indices paired with the
1439 /// expression to store into that field. The initializers will be
1440 /// evaluated in the order specified by `fields`.
1442 /// - `optbase` contains information on the base struct (if any) from
1443 /// which remaining fields are copied; see comments on `StructBaseInfo`.
// NOTE(review): fragmented extraction — interior lines (including some
// parameters and match arms) are elided; comments only, no tokens changed.
1444 pub fn trans_adt<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
1447                                  fields: &[(uint, &ast::Expr)],
1448                                  optbase: Option<StructBaseInfo<'a, 'tcx>>,
1450                                  source_location: Option<NodeInfo>)
1451                                  -> Block<'blk, 'tcx> {
1452     let _icx = push_ctxt("trans_adt");
1454     let repr = adt::represent_type(bcx.ccx(), ty);
// Emit debuginfo location for the ADT expression itself, if known.
1456     match source_location {
1457         Some(src_loc) => debuginfo::set_source_location(bcx.fcx,
1463     // If we don't care about the result, just make a
1464     // temporary stack slot
1465     let addr = match dest {
1467         Ignore => alloc_ty(bcx, ty, "temp"),
1470     // This scope holds intermediates that must be cleaned should
1471     // panic occur before the ADT as a whole is ready.
1472     let custom_cleanup_scope = fcx.push_custom_cleanup_scope();
1474     // First we trans the base, if we have one, to the dest
1475     for base in optbase.iter() {
// `..base` syntax is only valid for structs, whose discriminant is 0.
1476         assert_eq!(discr, 0);
1478         match ty::expr_kind(bcx.tcx(), &*base.expr) {
// Cheap case: a base rvalue with no drop obligations can be written
// straight into the destination, then selectively overwritten.
1479             ty::RvalueDpsExpr | ty::RvalueDatumExpr if !type_needs_drop(bcx.tcx(), ty) => {
1480                 bcx = trans_into(bcx, &*base.expr, SaveIn(addr));
1482             ty::RvalueStmtExpr => bcx.tcx().sess.bug("unexpected expr kind for struct base expr"),
// General case: take the base as an lvalue and copy only the fields
// not explicitly overridden.
1484                 let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, &*base.expr, "base"));
1485                 for &(i, t) in base.fields.iter() {
1486                     let datum = base_datum.get_element(
1487                             bcx, t, |srcval| adt::trans_field_ptr(bcx, &*repr, srcval, discr, i));
1488                     assert!(type_is_sized(bcx.tcx(), datum.ty));
1489                     let dest = adt::trans_field_ptr(bcx, &*repr, addr, discr, i);
1490                     bcx = datum.store_to(bcx, dest);
// Re-emit the source location: translating the base may have moved it.
1496     match source_location {
1497         Some(src_loc) => debuginfo::set_source_location(bcx.fcx,
1503     if ty::type_is_simd(bcx.tcx(), ty) {
1504         // This is the constructor of a SIMD type, such types are
1505         // always primitive machine types and so do not have a
1506         // destructor or require any clean-up.
1507         let llty = type_of::type_of(bcx.ccx(), ty);
1509         // keep a vector as a register, and running through the field
1510         // `insertelement`ing them directly into that register
1511         // (i.e. avoid GEPi and `store`s to an alloca) .
1512         let mut vec_val = C_undef(llty);
1514         for &(i, ref e) in fields.iter() {
1515             let block_datum = trans(bcx, &**e);
1516             bcx = block_datum.bcx;
1517             let position = C_uint(bcx.ccx(), i);
1518             let value = block_datum.datum.to_llscalarish(bcx);
1519             vec_val = InsertElement(bcx, vec_val, value, position);
1521         Store(bcx, vec_val, addr);
1523         // Now, we just overwrite the fields we've explicitly specified
1524         for &(i, ref e) in fields.iter() {
1525             let dest = adt::trans_field_ptr(bcx, &*repr, addr, discr, i);
1526             let e_ty = expr_ty_adjusted(bcx, &**e);
1527             bcx = trans_into(bcx, &**e, SaveIn(dest));
// Each initialized field gets cleanup scheduled in the custom scope so it
// is dropped if a later initializer panics before the ADT is complete.
1528             let scope = cleanup::CustomScope(custom_cleanup_scope);
1529             fcx.schedule_lifetime_end(scope, dest);
1530             fcx.schedule_drop_mem(scope, dest, e_ty);
// All fields are in place: write the discriminant, then retire the
// partial-initialization cleanups (the whole value owns them now).
1534         adt::trans_set_discr(bcx, &*repr, addr, discr);
1536     fcx.pop_custom_cleanup_scope(custom_cleanup_scope);
1538     // If we don't care about the result drop the temporary we made
1542         bcx = glue::drop_ty(bcx, addr, ty, source_location);
1543         base::call_lifetime_end(bcx, addr);
// Translates a scalar literal into an immediate rvalue datum.
// NOTE(review): parameter lines are elided in this extraction.
1550 fn trans_immediate_lit<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1553                                    -> DatumBlock<'blk, 'tcx, Expr> {
1554     // must not be a string constant, that is a RvalueDpsExpr
1555     let _icx = push_ctxt("trans_immediate_lit");
1556     let ty = expr_ty(bcx, expr);
1557     let v = consts::const_lit(bcx.ccx(), expr, lit);
1558     immediate_rvalue_bcx(bcx, v, ty).to_expr_datumblock()
// Translates a non-overloaded unary expression (`!`, `-`, `box`, `*`)
// into a datum. NOTE(review): fragmented extraction — the match arms'
// headers are elided; comments only, no tokens changed.
1561 fn trans_unary<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1564                            sub_expr: &ast::Expr)
1565                            -> DatumBlock<'blk, 'tcx, Expr> {
1566     let ccx = bcx.ccx();
1568     let _icx = push_ctxt("trans_unary_datum");
1570     let method_call = MethodCall::expr(expr.id);
1572     // The only overloaded operator that is translated to a datum
1573     // is an overloaded deref, since it is always yields a `&T`.
1574     // Otherwise, we should be in the RvalueDpsExpr path.
1576         op == ast::UnDeref ||
1577         !ccx.tcx().method_map.borrow().contains_key(&method_call));
1579     let un_ty = expr_ty(bcx, expr);
// Logical/bitwise not: scalarize and emit LLVM `not`.
1583             let datum = unpack_datum!(bcx, trans(bcx, sub_expr));
1584             let llresult = Not(bcx, datum.to_llscalarish(bcx));
1585             immediate_rvalue_bcx(bcx, llresult, un_ty).to_expr_datumblock()
// Negation: float vs integer negation chosen by type.
1588             let datum = unpack_datum!(bcx, trans(bcx, sub_expr));
1589             let val = datum.to_llscalarish(bcx);
1591             if ty::type_is_fp(un_ty) {
1597             immediate_rvalue_bcx(bcx, llneg, un_ty).to_expr_datumblock()
// `box expr`: allocate and fill a unique pointer.
1600             trans_uniq_expr(bcx, un_ty, sub_expr, expr_ty(bcx, sub_expr))
// `*expr`: single dereference (possibly via an overloaded Deref impl).
1603             let datum = unpack_datum!(bcx, trans(bcx, sub_expr));
1604             deref_once(bcx, expr, datum, method_call)
// Translates `box contents`: allocates exchange-heap storage for
// `contents_ty` and writes the contents into it, yielding the box as an
// immediate rvalue. NOTE(review): fragmented extraction.
1609 fn trans_uniq_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1611                                contents: &ast::Expr,
1612                                contents_ty: Ty<'tcx>)
1613                                -> DatumBlock<'blk, 'tcx, Expr> {
1614     let _icx = push_ctxt("trans_uniq_expr");
1616     assert!(type_is_sized(bcx.tcx(), contents_ty));
1617     let llty = type_of::type_of(bcx.ccx(), contents_ty);
1618     let size = llsize_of(bcx.ccx(), llty);
1619     let align = C_uint(bcx.ccx(), type_of::align_of(bcx.ccx(), contents_ty));
1620     let llty_ptr = llty.ptr_to();
1621     let Result { bcx, val } = malloc_raw_dyn(bcx, llty_ptr, box_ty, size, align);
1622     // Unique boxes do not allocate for zero-size types. The standard library
1623     // may assume that `free` is never called on the pointer returned for
1624     // `Box<ZeroSizeType>`.
1625     let bcx = if llsize_of_alloc(bcx.ccx(), llty) == 0 {
1626         trans_into(bcx, contents, SaveIn(val))
// Non-zero-sized: schedule a free of the raw allocation so it is
// reclaimed if translating the contents panics, then cancel the cleanup
// once initialization succeeded.
1628         let custom_cleanup_scope = fcx.push_custom_cleanup_scope();
1629         fcx.schedule_free_value(cleanup::CustomScope(custom_cleanup_scope),
1630                                 val, cleanup::HeapExchange, contents_ty);
1631         let bcx = trans_into(bcx, contents, SaveIn(val));
1632         fcx.pop_custom_cleanup_scope(custom_cleanup_scope);
1635     immediate_rvalue_bcx(bcx, val, box_ty).to_expr_datumblock()
// Translates `&subexpr`: forces the operand into an lvalue and returns
// its address. An opened DST value is re-closed into a fat pointer
// (data ptr + length) in a scratch slot. NOTE(review): fragmented extraction.
1638 fn trans_addr_of<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1640                              subexpr: &ast::Expr)
1641                              -> DatumBlock<'blk, 'tcx, Expr> {
1642     let _icx = push_ctxt("trans_addr_of");
1644     let sub_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, subexpr, "addr_of"));
1645     match sub_datum.ty.sty {
1647             // Opened DST value, close to a fat pointer
1648             debug!("Closing fat pointer {}", bcx.ty_to_string(sub_datum.ty));
1650             let scratch = rvalue_scratch_datum(bcx,
1651                                                ty::close_type(bcx.tcx(), sub_datum.ty),
// Copy both halves of the fat pointer into the scratch datum.
1653             let base = Load(bcx, get_dataptr(bcx, sub_datum.val));
1654             Store(bcx, base, get_dataptr(bcx, scratch.val));
1656             let len = Load(bcx, get_len(bcx, sub_datum.val));
1657             Store(bcx, len, get_len(bcx, scratch.val));
1659             DatumBlock::new(bcx, scratch.to_expr_datum())
1662             // Sized value, ref to a thin pointer
1663             let ty = expr_ty(bcx, expr);
1664             immediate_rvalue_bcx(bcx, sub_datum.val, ty).to_expr_datumblock()
1669 // Important to get types for both lhs and rhs, because one might be _|_
1670 // and the other not.
// Translates a non-lazy, non-overloaded binary operator on already
// scalarized operands, selecting float/int/signed LLVM instructions.
// NOTE(review): fragmented extraction — several parameter lines and
// match-arm headers are elided; comments only, no tokens changed.
1671 fn trans_eager_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1672                                  binop_expr: &ast::Expr,
1679                                  -> DatumBlock<'blk, 'tcx, Expr> {
1680     let _icx = push_ctxt("trans_eager_binop");
1682     let tcx = bcx.tcx();
// For SIMD operands the per-element type drives float/signed selection.
1683     let is_simd = ty::type_is_simd(tcx, lhs_t);
1685         if is_simd { ty::simd_type(tcx, lhs_t) }
1688     let is_float = ty::type_is_fp(intype);
1689     let is_signed = ty::type_is_signed(intype);
// Shift amounts may need a width cast to match the shifted operand.
1691     let rhs = base::cast_shift_expr_rhs(bcx, op, lhs, rhs);
1694     let val = match op {
1696             if is_float { FAdd(bcx, lhs, rhs) }
1697             else { Add(bcx, lhs, rhs) }
1700             if is_float { FSub(bcx, lhs, rhs) }
1701             else { Sub(bcx, lhs, rhs) }
1704             if is_float { FMul(bcx, lhs, rhs) }
1705             else { Mul(bcx, lhs, rhs) }
1711                 // Only zero-check integers; fp /0 is NaN
1712                 bcx = base::fail_if_zero_or_overflows(bcx, binop_expr.span,
1713                                                       op, lhs, rhs, rhs_t);
1725                 // Only zero-check integers; fp %0 is NaN
1726                 bcx = base::fail_if_zero_or_overflows(bcx, binop_expr.span,
1727                                                       op, lhs, rhs, rhs_t);
1735         ast::BiBitOr => Or(bcx, lhs, rhs),
1736         ast::BiBitAnd => And(bcx, lhs, rhs),
1737         ast::BiBitXor => Xor(bcx, lhs, rhs),
1738         ast::BiShl => Shl(bcx, lhs, rhs),
// Right shift: arithmetic for signed, logical for unsigned.
1742             } else { LShr(bcx, lhs, rhs) }
1744         ast::BiEq | ast::BiNe | ast::BiLt | ast::BiGe | ast::BiLe | ast::BiGt => {
1745             if ty::type_is_scalar(rhs_t) {
1746                 unpack_result!(bcx, base::compare_scalar_types(bcx, lhs, rhs, rhs_t, op))
1748                 base::compare_simd_types(bcx, lhs, rhs, intype, ty::simd_size(tcx, lhs_t), op)
1750                 bcx.tcx().sess.span_bug(binop_expr.span, "comparison operator unsupported for type")
1754             bcx.tcx().sess.span_bug(binop_expr.span, "unexpected binop");
1758     immediate_rvalue_bcx(bcx, val, binop_ty).to_expr_datumblock()
1761 // refinement types would obviate the need for this
// Selects `&&` vs `||` semantics in trans_lazy_binop. The variants
// (used below as `lazy_and` / `lazy_or`) are elided from this extraction.
1762 enum lazy_binop_ty {
// Translates short-circuiting `&&` / `||`: evaluates the LHS, branches
// around the RHS, and joins the two results with an i1 phi.
// NOTE(review): fragmented extraction — parameter lines are elided.
1767 fn trans_lazy_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1768                                 binop_expr: &ast::Expr,
1772                                 -> DatumBlock<'blk, 'tcx, Expr> {
1773     let _icx = push_ctxt("trans_lazy_binop");
1774     let binop_ty = expr_ty(bcx, binop_expr);
1777     let DatumBlock {bcx: past_lhs, datum: lhs} = trans(bcx, a);
1778     let lhs = lhs.to_llscalarish(past_lhs);
// LHS diverged: nothing more to evaluate.
1780     if past_lhs.unreachable.get() {
1781         return immediate_rvalue_bcx(past_lhs, lhs, binop_ty).to_expr_datumblock();
1784     let join = fcx.new_id_block("join", binop_expr.id);
1785     let before_rhs = fcx.new_id_block("before_rhs", b.id);
// `&&` evaluates the RHS when LHS is true; `||` when LHS is false.
1788         lazy_and => CondBr(past_lhs, lhs, before_rhs.llbb, join.llbb),
1789         lazy_or => CondBr(past_lhs, lhs, join.llbb, before_rhs.llbb)
1792     let DatumBlock {bcx: past_rhs, datum: rhs} = trans(before_rhs, b);
1793     let rhs = rhs.to_llscalarish(past_rhs);
// RHS diverged: the only live value reaching `join` is the LHS.
1795     if past_rhs.unreachable.get() {
1796         return immediate_rvalue_bcx(join, lhs, binop_ty).to_expr_datumblock();
1799     Br(past_rhs, join.llbb);
1800     let phi = Phi(join, Type::i1(bcx.ccx()), &[lhs, rhs],
1801                   &[past_lhs.llbb, past_rhs.llbb]);
1803     return immediate_rvalue_bcx(join, phi, binop_ty).to_expr_datumblock();
// Dispatches a binary expression: lazy ops to trans_lazy_binop, all
// other non-overloaded ops to trans_eager_binop after scalarizing
// both operands. NOTE(review): fragmented extraction.
1806 fn trans_binary<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1811                             -> DatumBlock<'blk, 'tcx, Expr> {
1812     let _icx = push_ctxt("trans_binary");
1813     let ccx = bcx.ccx();
1815     // if overloaded, would be RvalueDpsExpr
1816     assert!(!ccx.tcx().method_map.borrow().contains_key(&MethodCall::expr(expr.id)));
1820             trans_lazy_binop(bcx, expr, lazy_and, lhs, rhs)
1823             trans_lazy_binop(bcx, expr, lazy_or, lhs, rhs)
1827             let lhs_datum = unpack_datum!(bcx, trans(bcx, lhs));
1828             let rhs_datum = unpack_datum!(bcx, trans(bcx, rhs));
1829             let binop_ty = expr_ty(bcx, expr);
1831             debug!("trans_binary (expr {}): lhs_datum={}",
1833                    lhs_datum.to_string(ccx));
1834             let lhs_ty = lhs_datum.ty;
1835             let lhs = lhs_datum.to_llscalarish(bcx);
1837             debug!("trans_binary (expr {}): rhs_datum={}",
1839                    rhs_datum.to_string(ccx));
1840             let rhs_ty = rhs_datum.ty;
1841             let rhs = rhs_datum.to_llscalarish(bcx);
1842             trans_eager_binop(bcx, expr, binop_ty, op,
1843                               lhs_ty, lhs, rhs_ty, rhs)
// Translates an operator expression that resolved to a user-defined
// method (looked up via `method_call` in the method map) as a method
// call with `lhs` as receiver. NOTE(review): fragmented extraction.
1848 fn trans_overloaded_op<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1850                                    method_call: MethodCall,
1851                                    lhs: Datum<'tcx, Expr>,
1852                                    rhs: Vec<(Datum<'tcx, Expr>, ast::NodeId)>,
1855                                    -> Result<'blk, 'tcx> {
1856     let method_ty = (*bcx.tcx().method_map.borrow())[method_call].ty;
1857     callee::trans_call_inner(bcx,
1858                              Some(expr_info(expr)),
1859                              monomorphize_type(bcx, method_ty),
1860                              |bcx, arg_cleanup_scope| {
1861                                 meth::trans_method_callee(bcx,
1866                              callee::ArgOverloadedOp(lhs, rhs, autoref),
// Translates `callee(args...)` where the call goes through an overloaded
// call operator (Fn-trait method): the callee expression becomes the
// receiver and is prepended to the argument list.
// NOTE(review): fragmented extraction.
1870 fn trans_overloaded_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
1872                                          callee: &'a ast::Expr,
1873                                          args: &'a [P<ast::Expr>],
1875                                          -> Block<'blk, 'tcx> {
1876     let method_call = MethodCall::expr(expr.id);
1877     let method_type = (*bcx.tcx()
1879                            .borrow())[method_call]
1881     let mut all_args = vec!(callee);
1882     all_args.extend(args.iter().map(|e| &**e));
1884     callee::trans_call_inner(bcx,
1885                              Some(expr_info(expr)),
1886                              monomorphize_type(bcx,
1888                              |bcx, arg_cleanup_scope| {
1889                                 meth::trans_method_callee(
1895                              callee::ArgOverloadedCall(all_args),
// Casts an integer value between LLVM integer types: bitcast when widths
// match, truncate when narrowing, sign- or zero-extend when widening
// (chosen by the elided `signed` flag — parameter lines are missing from
// this extraction).
1900 fn int_cast(bcx: Block,
1906     let _icx = push_ctxt("int_cast");
1908         let srcsz = llvm::LLVMGetIntTypeWidth(llsrctype.to_ref());
1909         let dstsz = llvm::LLVMGetIntTypeWidth(lldsttype.to_ref());
1910         return if dstsz == srcsz {
1911             BitCast(bcx, llsrc, lldsttype)
1912         } else if srcsz > dstsz {
1913             TruncOrBitCast(bcx, llsrc, lldsttype)
1915             SExtOrBitCast(bcx, llsrc, lldsttype)
1917             ZExtOrBitCast(bcx, llsrc, lldsttype)
// Casts a float value between LLVM float types: extend when widening,
// truncate when narrowing (the equal-width fallthrough is elided from
// this extraction).
1922 fn float_cast(bcx: Block,
1927     let _icx = push_ctxt("float_cast");
1928     let srcsz = llsrctype.float_width();
1929     let dstsz = lldsttype.float_width();
1930     return if dstsz > srcsz {
1931         FPExt(bcx, llsrc, lldsttype)
1932     } else if srcsz > dstsz {
1933         FPTrunc(bcx, llsrc, lldsttype)
// Classification of types for `as`-cast lowering; variants (used below:
// cast_integral, cast_float, cast_pointer, cast_enum) are elided from
// this extraction.
1937 #[derive(Copy, PartialEq, Show)]
1938 pub enum cast_kind {
// Classifies `t` into a cast_kind for trans_imm_cast. Thin pointers
// count as pointers; fat (unsized-pointee) handling is elided here.
1946 pub fn cast_type_kind<'tcx>(tcx: &ty::ctxt<'tcx>, t: Ty<'tcx>) -> cast_kind {
1948         ty::ty_char => cast_integral,
1949         ty::ty_float(..) => cast_float,
1950         ty::ty_rptr(_, mt) | ty::ty_ptr(mt) => {
1951             if type_is_sized(tcx, mt.ty) {
1957         ty::ty_bare_fn(..) => cast_pointer,
1958         ty::ty_int(..) => cast_integral,
1959         ty::ty_uint(..) => cast_integral,
1960         ty::ty_bool => cast_integral,
1961         ty::ty_enum(..) => cast_enum,
// Returns true when a cast needs no code, by comparing what the two
// types dereference to (the comparison itself is elided from this
// extraction — presumably pointee equality; confirm against full source).
1966 fn cast_is_noop<'tcx>(t_in: Ty<'tcx>, t_out: Ty<'tcx>) -> bool {
1967     match (ty::deref(t_in, true), ty::deref(t_out, true)) {
1968         (Some(ty::mt{ ty: t_in, .. }), Some(ty::mt{ ty: t_out, .. })) => {
// Translates an immediate `as`-cast: classifies source and target types
// with cast_type_kind and emits the matching LLVM conversion.
// NOTE(review): fragmented extraction — interior lines are elided;
// comments only, no tokens changed.
1975 fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1978                               -> DatumBlock<'blk, 'tcx, Expr> {
1979     let _icx = push_ctxt("trans_cast");
1981     let ccx = bcx.ccx();
1983     let t_in = expr_ty(bcx, expr);
1984     let t_out = node_id_type(bcx, id);
1985     let k_in = cast_type_kind(bcx.tcx(), t_in);
1986     let k_out = cast_type_kind(bcx.tcx(), t_out);
// Signedness of the *source* decides sext vs zext / SIToFP vs UIToFP.
1987     let s_in = k_in == cast_integral && ty::type_is_signed(t_in);
1988     let ll_t_in = type_of::arg_type_of(ccx, t_in);
1989     let ll_t_out = type_of::arg_type_of(ccx, t_out);
1991     // Convert the value to be cast into a ValueRef, either by-ref or
1992     // by-value as appropriate given its type:
1993     let mut datum = unpack_datum!(bcx, trans(bcx, expr));
// Identity-ish casts need no conversion code at all.
1995     if cast_is_noop(datum.ty, t_out) {
1997         return DatumBlock::new(bcx, datum);
2000     let newval = match (k_in, k_out) {
2001         (cast_integral, cast_integral) => {
2002             let llexpr = datum.to_llscalarish(bcx);
2003             int_cast(bcx, ll_t_out, ll_t_in, llexpr, s_in)
2005         (cast_float, cast_float) => {
2006             let llexpr = datum.to_llscalarish(bcx);
2007             float_cast(bcx, ll_t_out, ll_t_in, llexpr)
2009         (cast_integral, cast_float) => {
2010             let llexpr = datum.to_llscalarish(bcx);
2012                 SIToFP(bcx, llexpr, ll_t_out)
2013             } else { UIToFP(bcx, llexpr, ll_t_out) }
2015         (cast_float, cast_integral) => {
2016             let llexpr = datum.to_llscalarish(bcx);
2017             if ty::type_is_signed(t_out) {
2018                 FPToSI(bcx, llexpr, ll_t_out)
2019             } else { FPToUI(bcx, llexpr, ll_t_out) }
2021         (cast_integral, cast_pointer) => {
2022             let llexpr = datum.to_llscalarish(bcx);
2023             IntToPtr(bcx, llexpr, ll_t_out)
2025         (cast_pointer, cast_integral) => {
2026             let llexpr = datum.to_llscalarish(bcx);
2027             PtrToInt(bcx, llexpr, ll_t_out)
2029         (cast_pointer, cast_pointer) => {
2030             let llexpr = datum.to_llscalarish(bcx);
2031             PointerCast(bcx, llexpr, ll_t_out)
// Enum-to-scalar casts: read the discriminant (as an lvalue), then
// convert it like an ordinary integer.
2033         (cast_enum, cast_integral) |
2034         (cast_enum, cast_float) => {
2036             let repr = adt::represent_type(ccx, t_in);
2037             let datum = unpack_datum!(
2038                 bcx, datum.to_lvalue_datum(bcx, "trans_imm_cast", expr.id));
2039             let llexpr_ptr = datum.to_llref();
2041                 adt::trans_get_discr(bcx, &*repr, llexpr_ptr, Some(Type::i64(ccx)));
2043                 cast_integral => int_cast(bcx, ll_t_out,
2044                                           val_ty(lldiscrim_a),
2046                 cast_float => SIToFP(bcx, lldiscrim_a, ll_t_out),
2048                     ccx.sess().bug(format!("translating unsupported cast: \
2049                                             {} ({:?}) -> {} ({:?})",
2050                                             t_in.repr(bcx.tcx()),
2052                                             t_out.repr(bcx.tcx()),
2053                                             k_out).index(&FullRange))
2057         _ => ccx.sess().bug(format!("translating unsupported cast: \
2058                                     {} ({:?}) -> {} ({:?})",
2059                                     t_in.repr(bcx.tcx()),
2061                                     t_out.repr(bcx.tcx()),
2062                                     k_out).index(&FullRange))
2064     return immediate_rvalue_bcx(bcx, newval, t_out).to_expr_datumblock();
// Translates a compound assignment `dst op= src` for built-in (not
// user-overloaded) operators: load dst, compute via trans_eager_binop,
// store back. NOTE(review): fragmented extraction.
2067 fn trans_assign_op<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2072                                -> Block<'blk, 'tcx> {
2073     let _icx = push_ctxt("trans_assign_op");
2076     debug!("trans_assign_op(expr={})", bcx.expr_to_string(expr));
2078     // User-defined operator methods cannot be used with `+=` etc right now
2079     assert!(!bcx.tcx().method_map.borrow().contains_key(&MethodCall::expr(expr.id)));
2081     // Evaluate LHS (destination), which should be an lvalue
2082     let dst_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, dst, "assign_op"));
// The in-place overwrite below is only sound for types without a dtor.
2083     assert!(!type_needs_drop(bcx.tcx(), dst_datum.ty));
2084     let dst_ty = dst_datum.ty;
2085     let dst = load_ty(bcx, dst_datum.val, dst_datum.ty);
2088     let rhs_datum = unpack_datum!(bcx, trans(bcx, &*src));
2089     let rhs_ty = rhs_datum.ty;
2090     let rhs = rhs_datum.to_llscalarish(bcx);
2092     // Perform computation and store the result
2093     let result_datum = unpack_datum!(
2094         bcx, trans_eager_binop(bcx, expr, dst_datum.ty, op,
2095                                dst_ty, dst, rhs_ty, rhs));
2096     return result_datum.store_to(bcx, dst_datum.val);
// Takes a reference to `datum`, producing a by-value rvalue datum of
// type `&'static T` (region/mutability are irrelevant at trans time).
// NOTE(review): fragmented extraction.
2099 fn auto_ref<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2100                         datum: Datum<'tcx, Expr>,
2102                         -> DatumBlock<'blk, 'tcx, Expr> {
2105     // Ensure cleanup of `datum` if not already scheduled and obtain
2106     // a "by ref" pointer.
2107     let lv_datum = unpack_datum!(bcx, datum.to_lvalue_datum(bcx, "autoref", expr.id));
2109     // Compute final type. Note that we are loose with the region and
2110     // mutability, since those things don't matter in trans.
2111     let referent_ty = lv_datum.ty;
2112     let ptr_ty = ty::mk_imm_rptr(bcx.tcx(), bcx.tcx().mk_region(ty::ReStatic), referent_ty);
2115     let llref = lv_datum.to_llref();
2117     // Construct the resulting datum, using what was the "by ref"
2118     // ValueRef of type `referent_ty` to be the "by value" ValueRef
2119     // of type `&referent_ty`.
2120     DatumBlock::new(bcx, Datum::new(llref, ptr_ty, RvalueExpr(Rvalue::new(ByValue))))
// Applies `deref_once` `times` times (for autoderef adjustments), using
// the per-level MethodCall key so overloaded derefs at each level
// resolve correctly. NOTE(review): fragmented extraction.
2123 fn deref_multiple<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2125                               datum: Datum<'tcx, Expr>,
2127                               -> DatumBlock<'blk, 'tcx, Expr> {
2129     let mut datum = datum;
2130     for i in range(0, times) {
2131         let method_call = MethodCall::autoderef(expr.id, i);
2132         datum = unpack_datum!(bcx, deref_once(bcx, expr, datum, method_call));
2134     DatumBlock { bcx: bcx, datum: datum }
// Performs a single dereference of `datum`, handling both overloaded
// Deref impls (dispatched via the method map) and built-in pointer types
// (Box, &T, *T), including unsized pointees.
// NOTE(review): fragmented extraction — interior lines are elided;
// comments only, no tokens changed.
2137 fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2139                           datum: Datum<'tcx, Expr>,
2140                           method_call: MethodCall)
2141                           -> DatumBlock<'blk, 'tcx, Expr> {
2142     let ccx = bcx.ccx();
2144     debug!("deref_once(expr={}, datum={}, method_call={:?})",
2145            expr.repr(bcx.tcx()),
2146            datum.to_string(ccx),
2151     // Check for overloaded deref.
2152     let method_ty = ccx.tcx().method_map.borrow()
2153                        .get(&method_call).map(|method| method.ty);
2154     let datum = match method_ty {
2155         Some(method_ty) => {
2156             let method_ty = monomorphize_type(bcx, method_ty);
2158             // Overloaded. Evaluate `trans_overloaded_op`, which will
2159             // invoke the user's deref() method, which basically
2160             // converts from the `Smaht<T>` pointer that we have into
2161             // a `&T` pointer. We can then proceed down the normal
2162             // path (below) to dereference that `&T`.
2163             let datum = match method_call.adjustment {
2164                 // Always perform an AutoPtr when applying an overloaded auto-deref
2165                 ty::AutoDeref(_) => unpack_datum!(bcx, auto_ref(bcx, datum, expr)),
2169             let ref_ty = // invoked methods have their LB regions instantiated
2170                 ty::assert_no_late_bound_regions(
2171                     ccx.tcx(), &ty::ty_fn_ret(method_ty)).unwrap();
// The user's deref() returns `&T` into a scratch slot, which then feeds
// the built-in deref path below.
2172             let scratch = rvalue_scratch_datum(bcx, ref_ty, "overloaded_deref");
2174             unpack_result!(bcx, trans_overloaded_op(bcx, expr, method_call,
2175                                                     datum, Vec::new(), Some(SaveIn(scratch.val)),
2177             scratch.to_expr_datum()
2180             // Not overloaded. We already have a pointer we know how to deref.
2185     let r = match datum.ty.sty {
2186         ty::ty_uniq(content_ty) => {
2187             if type_is_sized(bcx.tcx(), content_ty) {
2188                 deref_owned_pointer(bcx, expr, datum, content_ty)
2190                 // A fat pointer and an opened DST value have the same
2191                 // representation just different types. Since there is no
2192                 // temporary for `*e` here (because it is unsized), we cannot
2193                 // emulate the sized object code path for running drop glue and
2194                 // free. Instead, we schedule cleanup for `e`, turning it into
2196                 let datum = unpack_datum!(
2197                     bcx, datum.to_lvalue_datum(bcx, "deref", expr.id));
2199                 let datum = Datum::new(datum.val, ty::mk_open(bcx.tcx(), content_ty), LvalueExpr);
2200                 DatumBlock::new(bcx, datum)
2204         ty::ty_ptr(ty::mt { ty: content_ty, .. }) |
2205         ty::ty_rptr(_, ty::mt { ty: content_ty, .. }) => {
2206             if type_is_sized(bcx.tcx(), content_ty) {
2207                 let ptr = datum.to_llscalarish(bcx);
2209                 // Always generate an lvalue datum, even if datum.mode is
2210                 // an rvalue. This is because datum.mode is only an
2211                 // rvalue for non-owning pointers like &T or *T, in which
2212                 // case cleanup *is* scheduled elsewhere, by the true
2213                 // owner (or, in the case of *T, by the user).
2214                 DatumBlock::new(bcx, Datum::new(ptr, content_ty, LvalueExpr))
2216                 // A fat pointer and an opened DST value have the same representation
2217                 // just different types.
2218                 DatumBlock::new(bcx, Datum::new(datum.val,
2219                                                 ty::mk_open(bcx.tcx(), content_ty),
// Non-pointer type reaching deref: compiler bug.
2225             bcx.tcx().sess.span_bug(
2227                 format!("deref invoked on expr of illegal type {}",
2228                         datum.ty.repr(bcx.tcx())).index(&FullRange));
2232     debug!("deref_once(expr={}, method_call={:?}, result={})",
2233            expr.id, method_call, r.datum.to_string(ccx));
2237 /// We microoptimize derefs of owned pointers a bit here. Basically, the idea is to make the
2238 /// deref of an rvalue result in an rvalue. This helps to avoid intermediate stack slots in the
2239 /// resulting LLVM. The idea here is that, if the `Box<T>` pointer is an rvalue, then we can
2240 /// schedule a *shallow* free of the `Box<T>` pointer, and then return a ByRef rvalue into the
2241 /// pointer. Because the free is shallow, it is legit to return an rvalue, because we know that
2242 /// the contents are not yet scheduled to be freed. The language rules ensure that the contents
2243 /// will be used (or moved) before the free occurs.
2244 fn deref_owned_pointer<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2246 datum: Datum<'tcx, Expr>,
2247 content_ty: Ty<'tcx>)
2248 -> DatumBlock<'blk, 'tcx, Expr> {
2250 RvalueExpr(Rvalue { mode: ByRef }) => {
2251 let scope = cleanup::temporary_scope(bcx.tcx(), expr.id);
2252 let ptr = Load(bcx, datum.val);
2253 if !type_is_zero_size(bcx.ccx(), content_ty) {
2254 bcx.fcx.schedule_free_value(scope, ptr, cleanup::HeapExchange, content_ty);
2257 RvalueExpr(Rvalue { mode: ByValue }) => {
2258 let scope = cleanup::temporary_scope(bcx.tcx(), expr.id);
2259 if !type_is_zero_size(bcx.ccx(), content_ty) {
2260 bcx.fcx.schedule_free_value(scope, datum.val, cleanup::HeapExchange,
2267 // If we had an rvalue in, we produce an rvalue out.
2268 let (llptr, kind) = match datum.kind {
2270 (Load(bcx, datum.val), LvalueExpr)
2272 RvalueExpr(Rvalue { mode: ByRef }) => {
2273 (Load(bcx, datum.val), RvalueExpr(Rvalue::new(ByRef)))
2275 RvalueExpr(Rvalue { mode: ByValue }) => {
2276 (datum.val, RvalueExpr(Rvalue::new(ByRef)))
2280 let datum = Datum { ty: content_ty, val: llptr, kind: kind };
2281 DatumBlock { bcx: bcx, datum: datum }