1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! # Translation of Expressions
13 //! The expr module handles translation of expressions. The most general
14 //! translation routine is `trans()`, which will translate an expression
15 //! into a datum. `trans_into()` is also available, which will translate
16 //! an expression and write the result directly into memory, sometimes
17 //! avoiding the need for a temporary stack slot. Finally,
18 //! `trans_to_lvalue()` is available if you'd like to ensure that the
19 //! result has cleanup scheduled.
21 //! Internally, each of these functions dispatches to various other
22 //! expression functions depending on the kind of expression. We divide
23 //! up expressions into:
25 //! - **Datum expressions:** Those that most naturally yield values.
26 //! Examples would be `22`, `box x`, or `a + b` (when not overloaded).
27 //! - **DPS expressions:** Those that most naturally write into a location
28 //! in memory. Examples would be `foo()` or `Point { x: 3, y: 4 }`.
29 //! - **Statement expressions:** Those that do not generate a meaningful
30 //! result. Examples would be `while { ... }` or `return 44`.
32 //! Public entry points:
34 //! - `trans_into(bcx, expr, dest) -> bcx`: evaluates an expression,
35 //! storing the result into `dest`. This is the preferred form, if you
38 //! - `trans(bcx, expr) -> DatumBlock`: evaluates an expression, yielding
39 //! `Datum` with the result. You can then store the datum, inspect
40 //! the value, etc. This may introduce temporaries if the datum is a
43 //! - `trans_to_lvalue(bcx, expr, "...") -> DatumBlock`: evaluates an
44 //! expression and ensures that the result has a cleanup associated with it,
45 //! creating a temporary stack slot if necessary.
47 //! - `trans_local_var -> Datum`: looks up a local variable or upvar.
49 #![allow(non_camel_case_types)]
51 pub use self::cast_kind::*;
52 pub use self::Dest::*;
53 use self::lazy_binop_ty::*;
56 use llvm::{self, ValueRef};
57 use middle::check_const;
59 use middle::mem_categorization::Typer;
60 use middle::subst::{self, Substs};
61 use trans::{_match, adt, asm, base, callee, closure, consts, controlflow};
64 use trans::cleanup::{self, CleanupMethods};
67 use trans::debuginfo::{self, DebugLoc, ToDebugLoc};
71 use trans::monomorphize;
74 use middle::ty::{struct_fields, tup_fields};
75 use middle::ty::{AdjustDerefRef, AdjustReifyFnPointer, AutoUnsafe};
76 use middle::ty::{AutoPtr};
77 use middle::ty::{self, Ty};
78 use middle::ty::MethodCall;
79 use util::common::indenter;
80 use util::ppaux::Repr;
81 use trans::machine::{llsize_of, llsize_of_alloc};
82 use trans::type_::Type;
84 use syntax::{ast, ast_util, codemap};
86 use syntax::parse::token;
87 use std::iter::repeat;
93 // These are passed around by the code generating functions to track the
94 // destination of a computation's value.
// NOTE(review): this listing is elided -- the gutter numbers skip (96 -> 103),
// so the `Dest` enum body and the enclosing `impl` header are not visible.
// The match arms below show the two variants: `SaveIn(ValueRef)` and `Ignore`.
96 #[derive(Copy, PartialEq)]
// Debug rendering of a destination: `SaveIn` prints its LLVM value via the
// type-name cache, `Ignore` prints a plain tag.
103 pub fn to_string(&self, ccx: &CrateContext) -> String {
105 SaveIn(v) => format!("SaveIn({})", ccx.tn().val_to_string(v)),
106 Ignore => "Ignore".to_string()
111 /// This function is equivalent to `trans(bcx, expr).store_to_dest(dest)` but it may generate
112 /// better optimized LLVM code.
// NOTE(review): elided listing -- gutter line numbers skip (e.g. 125 -> 128,
// 137 -> 139), so closing braces and some match headers are not visible here.
113 pub fn trans_into<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
116 -> Block<'blk, 'tcx> {
119 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
// Expressions with recorded type adjustments take the general path through
// `trans`, which applies the adjustments before storing into `dest`.
121 if bcx.tcx().adjustments.borrow().contains_key(&expr.id) {
122 // use trans, which may be less efficient but
123 // which will perform the adjustments:
124 let datum = unpack_datum!(bcx, trans(bcx, expr));
125 return datum.store_to_dest(bcx, dest, expr.id);
// Constant-qualified expressions can be materialized once as an LLVM global
// and memcpy'd into the destination rather than re-translated at each use.
128 let qualif = bcx.tcx().const_qualif_map.borrow()[expr.id];
129 if !qualif.intersects(check_const::NOT_CONST | check_const::NEEDS_DROP) {
130 if !qualif.intersects(check_const::PREFER_IN_PLACE) {
131 if let SaveIn(lldest) = dest {
132 let global = consts::get_const_expr_as_global(bcx.ccx(), expr, qualif,
133 bcx.fcx.param_substs);
134 // Cast pointer to destination, because constants
135 // have different types.
136 let lldest = PointerCast(bcx, lldest, val_ty(global));
137 memcpy_ty(bcx, lldest, global, expr_ty_adjusted(bcx, expr));
139 // Don't do anything in the Ignore case, consts don't need drop.
142 // The only way we're going to see a `const` at this point is if
143 // it prefers in-place instantiation, likely because it contains
144 // `[x; N]` somewhere within.
146 ast::ExprPath(_) | ast::ExprQPath(_) => {
147 match bcx.def(expr.id) {
148 def::DefConst(did) => {
149 let expr = consts::get_const_expr(bcx.ccx(), did, expr);
150 // Temporarily get cleanup scopes out of the way,
151 // as they require sub-expressions to be contained
152 // inside the current AST scope.
153 // These should record no cleanups anyways, `const`
154 // can't have destructors.
155 let scopes = mem::replace(&mut *bcx.fcx.scopes.borrow_mut(),
157 bcx = trans_into(bcx, expr, dest);
158 let scopes = mem::replace(&mut *bcx.fcx.scopes.borrow_mut(),
160 assert!(scopes.is_empty());
// Non-constant path: bracket translation in an AST cleanup scope, then
// dispatch on the expression kind (datum / DPS / statement).
171 debug!("trans_into() expr={}", expr.repr(bcx.tcx()));
173 let cleanup_debug_loc = debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(),
177 bcx.fcx.push_ast_cleanup_scope(cleanup_debug_loc);
179 let kind = ty::expr_kind(bcx.tcx(), expr);
181 ty::LvalueExpr | ty::RvalueDatumExpr => {
182 trans_unadjusted(bcx, expr).store_to_dest(dest, expr.id)
184 ty::RvalueDpsExpr => {
185 trans_rvalue_dps_unadjusted(bcx, expr, dest)
187 ty::RvalueStmtExpr => {
188 trans_rvalue_stmt_unadjusted(bcx, expr)
192 bcx.fcx.pop_and_trans_ast_cleanup_scope(bcx, expr.id)
195 /// Translates an expression, returning a datum (and new block) encapsulating the result. When
196 /// possible, it is preferred to use `trans_into`, as that may avoid creating a temporary on the
// NOTE(review): elided listing -- several lines of this body (e.g. 202-204,
// 225-228, 250-254) are not visible in this view.
198 pub fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
200 -> DatumBlock<'blk, 'tcx, Expr> {
201 debug!("trans(expr={})", bcx.expr_to_string(expr));
// Fast path: expressions qualifying as constants are materialized as LLVM
// globals; the qualif flags decide whether an lvalue or rvalue is produced.
205 let qualif = bcx.tcx().const_qualif_map.borrow()[expr.id];
206 let adjusted_global = !qualif.intersects(check_const::NON_STATIC_BORROWS);
207 let global = if !qualif.intersects(check_const::NOT_CONST | check_const::NEEDS_DROP) {
208 let global = consts::get_const_expr_as_global(bcx.ccx(), expr, qualif,
209 bcx.fcx.param_substs);
211 if qualif.intersects(check_const::HAS_STATIC_BORROWS) {
212 // Is borrowed as 'static, must return lvalue.
214 // Cast pointer to global, because constants have different types.
215 let const_ty = expr_ty_adjusted(bcx, expr);
216 let llty = type_of::type_of(bcx.ccx(), const_ty);
217 let global = PointerCast(bcx, global, llty.ptr_to());
218 let datum = Datum::new(global, const_ty, Lvalue);
219 return DatumBlock::new(bcx, datum.to_expr_datum());
222 // Otherwise, keep around and perform adjustments, if needed.
223 let const_ty = if adjusted_global {
224 expr_ty_adjusted(bcx, expr)
229 // This could use a better heuristic.
230 Some(if type_is_immediate(bcx.ccx(), const_ty) {
231 // Cast pointer to global, because constants have different types.
232 let llty = type_of::type_of(bcx.ccx(), const_ty);
233 let global = PointerCast(bcx, global, llty.ptr_to());
234 // Maybe just get the value directly, instead of loading it?
235 immediate_rvalue(load_ty(bcx, global, const_ty), const_ty)
237 let llty = type_of::type_of(bcx.ccx(), const_ty);
238 // HACK(eddyb) get around issues with lifetime intrinsics.
239 let scratch = alloca_no_lifetime(bcx, llty, "const");
240 let lldest = if !ty::type_is_structural(const_ty) {
241 // Cast pointer to slot, because constants have different types.
242 PointerCast(bcx, scratch, val_ty(global))
244 // In this case, memcpy_ty calls llvm.memcpy after casting both
245 // source and destination to i8*, so we don't need any casts.
248 memcpy_ty(bcx, lldest, global, const_ty);
249 Datum::new(scratch, const_ty, Rvalue::new(ByRef))
// General path: translate inside an AST cleanup scope, then apply any
// adjustments (unless trans::consts already applied them for a global).
255 let cleanup_debug_loc = debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(),
259 fcx.push_ast_cleanup_scope(cleanup_debug_loc);
260 let datum = match global {
261 Some(rvalue) => rvalue.to_expr_datum(),
262 None => unpack_datum!(bcx, trans_unadjusted(bcx, expr))
264 let datum = if adjusted_global {
265 datum // trans::consts already performed adjustments.
267 unpack_datum!(bcx, apply_adjustments(bcx, expr, datum))
269 bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, expr.id);
270 return DatumBlock::new(bcx, datum);
// Returns a pointer to the "extra" word (length / vtable slot, per
// abi::FAT_PTR_EXTRA) of a fat-pointer pair. Closing brace elided in listing.
273 pub fn get_len(bcx: Block, fat_ptr: ValueRef) -> ValueRef {
274 GEPi(bcx, fat_ptr, &[0, abi::FAT_PTR_EXTRA])
// Returns a pointer to the data-pointer word (abi::FAT_PTR_ADDR) of a
// fat-pointer pair. Closing brace elided in listing.
277 pub fn get_dataptr(bcx: Block, fat_ptr: ValueRef) -> ValueRef {
278 GEPi(bcx, fat_ptr, &[0, abi::FAT_PTR_ADDR])
281 // Retrieve the information we are losing (making dynamic) in an unsizing
283 // When making a dtor, we need to do different things depending on the
284 // ownership of the object.. mk_ty is a function for turning `unadjusted_ty`
285 // into a type to be destructed. If we want to end up with a Box pointer,
286 // then mk_ty should make a Box pointer (T -> Box<T>), if we want a
287 // borrowed reference then it should be T -> &T.
// Produces the "extra" word of the resulting fat pointer: a constant length
// for `UnsizeLength`, a recursive lookup for `UnsizeStruct`, or a vtable
// pointer for `UnsizeVtable`. (Match header elided in this listing.)
288 pub fn unsized_info<'a, 'tcx, F>(ccx: &CrateContext<'a, 'tcx>,
289 kind: &ty::UnsizeKind<'tcx>,
291 unadjusted_ty: Ty<'tcx>,
292 param_substs: &'tcx subst::Substs<'tcx>,
293 mk_ty: F) -> ValueRef where
294 F: FnOnce(Ty<'tcx>) -> Ty<'tcx>,
296 // FIXME(#19596) workaround: `|t| t` causes monomorphization recursion
297 fn identity<T>(t: T) -> T { t }
299 debug!("unsized_info(kind={:?}, id={}, unadjusted_ty={})",
300 kind, id, unadjusted_ty.repr(ccx.tcx()));
302 &ty::UnsizeLength(len) => C_uint(ccx, len),
303 &ty::UnsizeStruct(box ref k, tp_index) => match unadjusted_ty.sty {
304 ty::ty_struct(_, ref substs) => {
305 let ty_substs = substs.types.get_slice(subst::TypeSpace);
306 // The dtor for a field treats it like a value, so mk_ty
307 // should just be the identity function.
308 unsized_info(ccx, k, id, ty_substs[tp_index], param_substs, identity)
310 _ => ccx.sess().bug(&format!("UnsizeStruct with bad sty: {}",
311 unadjusted_ty.repr(ccx.tcx()))[])
313 &ty::UnsizeVtable(ty::TyTrait { ref principal, .. }, _) => {
314 // Note that we preserve binding levels here:
315 let substs = principal.0.substs.with_self_ty(unadjusted_ty).erase_regions();
316 let substs = ccx.tcx().mk_substs(substs);
317 let trait_ref = ty::Binder(Rc::new(ty::TraitRef { def_id: principal.def_id(),
319 let trait_ref = monomorphize::apply_param_substs(ccx.tcx(),
322 let box_ty = mk_ty(unadjusted_ty);
323 consts::ptrcast(meth::get_vtable(ccx, box_ty, trait_ref, param_substs),
324 Type::vtable_ptr(ccx))
329 /// Helper for trans that apply adjustments from `expr` to `datum`, which should be the unadjusted
330 /// translation of `expr`.
// NOTE(review): elided listing -- match headers and several arms/braces for
// the adjustment dispatch below are not visible in this view.
331 fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
333 datum: Datum<'tcx, Expr>)
334 -> DatumBlock<'blk, 'tcx, Expr> {
336 let mut datum = datum;
// No recorded adjustment: return the datum unchanged.
337 let adjustment = match bcx.tcx().adjustments.borrow().get(&expr.id).cloned() {
339 return DatumBlock::new(bcx, datum);
343 debug!("unadjusted datum for expr {}: {}, adjustment={}",
344 expr.repr(bcx.tcx()),
345 datum.to_string(bcx.ccx()),
346 adjustment.repr(bcx.tcx()));
348 AdjustReifyFnPointer(_def_id) => {
349 // FIXME(#19925) once fn item types are
350 // zero-sized, we'll need to do something here
352 AdjustDerefRef(ref adj) => {
// Decide how many derefs to actually emit, and whether the autoref step
// can be skipped (pure reborrows with no nested adjustment are no-ops).
353 let (autoderefs, use_autoref) = match adj.autoref {
354 // Extracting a value from a box counts as a deref, but if we are
355 // just converting Box<[T, ..n]> to Box<[T]> we aren't really doing
356 // a deref (and wouldn't if we could treat Box like a normal struct).
357 Some(ty::AutoUnsizeUniq(..)) => (adj.autoderefs - 1, true),
358 // We are a bit paranoid about adjustments and thus might have a re-
359 // borrow here which merely derefs and then refs again (it might have
360 // a different region or mutability, but we don't care here. It might
361 // also be just in case we need to unsize. But if there are no nested
362 // adjustments then it should be a no-op).
363 Some(ty::AutoPtr(_, _, None)) |
364 Some(ty::AutoUnsafe(_, None)) if adj.autoderefs == 1 => {
366 // Don't skip a conversion from Box<T> to &T, etc.
368 let method_call = MethodCall::autoderef(expr.id, adj.autoderefs-1);
369 let method = bcx.tcx().method_map.borrow().get(&method_call).is_some();
371 // Don't skip an overloaded deref.
372 (adj.autoderefs, true)
374 (adj.autoderefs - 1, false)
377 _ => (adj.autoderefs, true),
380 _ => (adj.autoderefs, true)
385 let lval = unpack_datum!(bcx, datum.to_lvalue_datum(bcx, "auto_deref", expr.id));
386 datum = unpack_datum!(
387 bcx, deref_multiple(bcx, expr, lval.to_expr_datum(), autoderefs));
390 // (You might think there is a more elegant way to do this than a
391 // use_autoref bool, but then you remember that the borrow checker exists).
392 if let (true, &Some(ref a)) = (use_autoref, &adj.autoref) {
393 datum = unpack_datum!(bcx, apply_autoref(a,
400 debug!("after adjustments, datum={}", datum.to_string(bcx.ccx()));
401 return DatumBlock::new(bcx, datum);
// Applies a single autoref adjustment (possibly recursing into a nested one
// for AutoPtr/AutoUnsafe), producing a new datum. Unsizing variants delegate
// to the unsize_* helpers below.
403 fn apply_autoref<'blk, 'tcx>(autoref: &ty::AutoRef<'tcx>,
404 bcx: Block<'blk, 'tcx>,
406 datum: Datum<'tcx, Expr>)
407 -> DatumBlock<'blk, 'tcx, Expr> {
409 let mut datum = datum;
411 let datum = match autoref {
412 &AutoPtr(_, _, ref a) | &AutoUnsafe(_, ref a) => {
// Apply the nested autoref first, then take a (possibly fat) reference.
414 if let &Some(box ref a) = a {
415 datum = unpack_datum!(bcx, apply_autoref(a, bcx, expr, datum));
417 if !type_is_sized(bcx.tcx(), datum.ty) {
419 let lval = unpack_datum!(bcx,
420 datum.to_lvalue_datum(bcx, "ref_fat_ptr", expr.id));
421 unpack_datum!(bcx, ref_fat_ptr(bcx, lval))
423 unpack_datum!(bcx, auto_ref(bcx, datum, expr))
426 &ty::AutoUnsize(ref k) => {
427 debug!(" AutoUnsize");
428 unpack_datum!(bcx, unsize_expr(bcx, expr, datum, k))
431 &ty::AutoUnsizeUniq(ty::UnsizeLength(len)) => {
432 debug!(" AutoUnsizeUniq(UnsizeLength)");
433 unpack_datum!(bcx, unsize_unique_vec(bcx, expr, datum, len))
435 &ty::AutoUnsizeUniq(ref k) => {
436 debug!(" AutoUnsizeUniq");
437 unpack_datum!(bcx, unsize_unique_expr(bcx, expr, datum, k))
441 DatumBlock::new(bcx, datum)
// Unsizes `datum` (e.g. [T; n] -> [T], T -> Trait) by building a fat pointer
// in a scratch slot: data pointer + the extra word from `unsized_info`.
444 fn unsize_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
446 datum: Datum<'tcx, Expr>,
447 k: &ty::UnsizeKind<'tcx>)
448 -> DatumBlock<'blk, 'tcx, Expr> {
451 let datum_ty = datum.ty;
452 let unsized_ty = ty::unsize_ty(tcx, datum_ty, k, expr.span);
453 debug!("unsized_ty={}", unsized_ty.repr(bcx.tcx()));
454 let dest_ty = ty::mk_open(tcx, unsized_ty);
// NOTE(review): this debug! prints `unsized_ty` again, not `dest_ty` --
// the label is misleading (pre-existing quirk, left as-is).
455 debug!("dest_ty={}", unsized_ty.repr(bcx.tcx()));
457 let info = unsized_info(bcx.ccx(), k, expr.id, datum_ty, bcx.fcx.param_substs,
458 |t| ty::mk_imm_rptr(tcx, tcx.mk_region(ty::ReStatic), t));
461 let lval = unpack_datum!(bcx,
462 datum.to_lvalue_datum(bcx, "into_fat_ptr", expr.id));
463 // Compute the base pointer. This doesn't change the pointer value,
464 // but merely its type.
465 let base = match *k {
466 ty::UnsizeStruct(..) | ty::UnsizeVtable(..) => {
467 PointerCast(bcx, lval.val, type_of::type_of(bcx.ccx(), unsized_ty).ptr_to())
469 ty::UnsizeLength(..) => {
470 GEPi(bcx, lval.val, &[0, 0])
474 let scratch = rvalue_scratch_datum(bcx, dest_ty, "__fat_ptr");
475 Store(bcx, base, get_dataptr(bcx, scratch.val));
476 Store(bcx, info, get_len(bcx, scratch.val));
478 DatumBlock::new(bcx, scratch.to_expr_datum())
// Converts Box<[T; len]> into Box<[T]>: stores the box pointer into the data
// slot of a scratch fat pointer and the constant `len` into the extra slot.
481 fn unsize_unique_vec<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
483 datum: Datum<'tcx, Expr>,
485 -> DatumBlock<'blk, 'tcx, Expr> {
489 let datum_ty = datum.ty;
491 debug!("unsize_unique_vec expr.id={} datum_ty={} len={}",
492 expr.id, datum_ty.repr(tcx), len);
494 // We do not arrange cleanup ourselves; if we already are an
495 // L-value, then cleanup will have already been scheduled (and
496 // the `datum.store_to` call below will emit code to zero the
497 // drop flag when moving out of the L-value). If we are an R-value,
498 // then we do not need to schedule cleanup.
500 let ll_len = C_uint(bcx.ccx(), len);
501 let unit_ty = ty::sequence_element_type(tcx, ty::type_content(datum_ty));
502 let vec_ty = ty::mk_uniq(tcx, ty::mk_vec(tcx, unit_ty, None));
503 let scratch = rvalue_scratch_datum(bcx, vec_ty, "__unsize_unique");
// Data slot of the fat pointer, viewed at the original (sized) box type so
// `store_to` writes the correct representation.
505 let base = get_dataptr(bcx, scratch.val);
506 let base = PointerCast(bcx,
508 type_of::type_of(bcx.ccx(), datum_ty).ptr_to());
509 bcx = datum.store_to(bcx, base);
511 Store(bcx, ll_len, get_len(bcx, scratch.val));
512 DatumBlock::new(bcx, scratch.to_expr_datum())
// General Box<T> -> Box<U> unsizing (U unsized): moves the box into the data
// slot of a scratch fat pointer and computes the extra word via unsized_info.
515 fn unsize_unique_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
517 datum: Datum<'tcx, Expr>,
518 k: &ty::UnsizeKind<'tcx>)
519 -> DatumBlock<'blk, 'tcx, Expr> {
523 let datum_ty = datum.ty;
// ICE unless the datum really is a unique box (ty_uniq arm elided in listing).
524 let unboxed_ty = match datum_ty.sty {
526 _ => bcx.sess().bug(&format!("Expected ty_uniq, found {}",
527 bcx.ty_to_string(datum_ty))[])
529 let result_ty = ty::mk_uniq(tcx, ty::unsize_ty(tcx, unboxed_ty, k, expr.span));
531 // We do not arrange cleanup ourselves; if we already are an
532 // L-value, then cleanup will have already been scheduled (and
533 // the `datum.store_to` call below will emit code to zero the
534 // drop flag when moving out of the L-value). If we are an R-value,
535 // then we do not need to schedule cleanup.
537 let scratch = rvalue_scratch_datum(bcx, result_ty, "__uniq_fat_ptr");
538 let llbox_ty = type_of::type_of(bcx.ccx(), datum_ty);
539 let base = PointerCast(bcx, get_dataptr(bcx, scratch.val), llbox_ty.ptr_to());
540 bcx = datum.store_to(bcx, base);
542 let info = unsized_info(bcx.ccx(), k, expr.id, unboxed_ty, bcx.fcx.param_substs,
543 |t| ty::mk_uniq(tcx, t));
544 Store(bcx, info, get_len(bcx, scratch.val));
546 DatumBlock::new(bcx, scratch.to_expr_datum())
550 /// Translates an expression in "lvalue" mode -- meaning that it returns a reference to the memory
551 /// that the expr represents.
553 /// If this expression is an rvalue, this implies introducing a temporary. In other words,
554 /// something like `x().f` is translated into roughly the equivalent of
556 /// { tmp = x(); tmp.f }
// Thin wrapper: translate to a datum, then force it into lvalue form
// (scheduling cleanup / creating a stack temporary as needed).
557 pub fn trans_to_lvalue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
560 -> DatumBlock<'blk, 'tcx, Lvalue> {
562 let datum = unpack_datum!(bcx, trans(bcx, expr));
563 return datum.to_lvalue_datum(bcx, name, expr.id);
566 /// A version of `trans` that ignores adjustments. You almost certainly do not want to call this
// Dispatches on the expression kind: datum expressions translate directly,
// statement expressions yield a `nil` datum, and DPS expressions are routed
// through a scratch slot (or Ignore when zero-sized).
568 fn trans_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
570 -> DatumBlock<'blk, 'tcx, Expr> {
573 debug!("trans_unadjusted(expr={})", bcx.expr_to_string(expr));
574 let _indenter = indenter();
576 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
578 return match ty::expr_kind(bcx.tcx(), expr) {
579 ty::LvalueExpr | ty::RvalueDatumExpr => {
580 let datum = unpack_datum!(bcx, {
581 trans_datum_unadjusted(bcx, expr)
584 DatumBlock {bcx: bcx, datum: datum}
587 ty::RvalueStmtExpr => {
588 bcx = trans_rvalue_stmt_unadjusted(bcx, expr);
589 nil(bcx, expr_ty(bcx, expr))
592 ty::RvalueDpsExpr => {
593 let ty = expr_ty(bcx, expr);
594 if type_is_zero_size(bcx.ccx(), ty) {
595 bcx = trans_rvalue_dps_unadjusted(bcx, expr, Ignore);
598 let scratch = rvalue_scratch_datum(bcx, ty, "");
599 bcx = trans_rvalue_dps_unadjusted(
600 bcx, expr, SaveIn(scratch.val));
602 // Note: this is not obviously a good idea. It causes
603 // immediate values to be loaded immediately after a
604 // return from a call or other similar expression,
605 // which in turn leads to alloca's having shorter
606 // lifetimes and hence larger stack frames. However,
607 // in turn it can lead to more register pressure.
608 // Still, in practice it seems to increase
609 // performance, since we have fewer problems with
611 let scratch = unpack_datum!(
612 bcx, scratch.to_appropriate_datum(bcx));
614 DatumBlock::new(bcx, scratch.to_expr_datum())
// Builds a placeholder datum for statement expressions: an LLVM `undef` of
// the expression's type wrapped as an immediate rvalue.
619 fn nil<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ty: Ty<'tcx>)
620 -> DatumBlock<'blk, 'tcx, Expr> {
621 let llval = C_undef(type_of::type_of(bcx.ccx(), ty));
622 let datum = immediate_rvalue(llval, ty);
623 DatumBlock::new(bcx, datum.to_expr_datum())
// Per-variant dispatch for datum-mode expressions (paths, field access,
// indexing, box, literals, operators, addr-of, scalar casts). Elided listing:
// the match header and several arm braces are not visible here.
627 fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
629 -> DatumBlock<'blk, 'tcx, Expr> {
632 let _icx = push_ctxt("trans_datum_unadjusted");
635 ast::ExprParen(ref e) => {
638 ast::ExprPath(_) | ast::ExprQPath(_) => {
639 trans_def(bcx, expr, bcx.def(expr.id))
641 ast::ExprField(ref base, ident) => {
642 trans_rec_field(bcx, &**base, ident.node)
644 ast::ExprTupField(ref base, idx) => {
645 trans_rec_tup_field(bcx, &**base, idx.node)
647 ast::ExprIndex(ref base, ref idx) => {
648 trans_index(bcx, expr, &**base, &**idx, MethodCall::expr(expr.id))
650 ast::ExprBox(_, ref contents) => {
651 // Special case for `Box<T>`
652 let box_ty = expr_ty(bcx, expr);
653 let contents_ty = expr_ty(bcx, &**contents);
656 trans_uniq_expr(bcx, expr, box_ty, &**contents, contents_ty)
658 _ => bcx.sess().span_bug(expr.span,
659 "expected unique box")
663 ast::ExprLit(ref lit) => trans_immediate_lit(bcx, expr, &**lit),
664 ast::ExprBinary(op, ref lhs, ref rhs) => {
665 trans_binary(bcx, expr, op, &**lhs, &**rhs)
667 ast::ExprUnary(op, ref x) => {
668 trans_unary(bcx, expr, op, &**x)
670 ast::ExprAddrOf(_, ref x) => {
// Taking the address of a repeat/vec literal builds a slice inside its own
// cleanup scope; other operands go through the plain addr-of path below.
672 ast::ExprRepeat(..) | ast::ExprVec(..) => {
673 // Special case for slices.
674 let cleanup_debug_loc =
675 debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(),
679 fcx.push_ast_cleanup_scope(cleanup_debug_loc);
680 let datum = unpack_datum!(
681 bcx, tvec::trans_slice_vec(bcx, expr, &**x));
682 bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, x.id);
683 DatumBlock::new(bcx, datum)
686 trans_addr_of(bcx, expr, &**x)
690 ast::ExprCast(ref val, _) => {
691 // Datum output mode means this is a scalar cast:
692 trans_imm_cast(bcx, &**val, expr.id)
695 bcx.tcx().sess.span_bug(
697 &format!("trans_rvalue_datum_unadjusted reached \
698 fall-through case: {:?}",
// Shared implementation for named-field and tuple-field access: translate the
// base to an lvalue, pick the field index via `get_idx`, and take a pointer
// to the field. Unsized fields get a fresh fat-pointer scratch slot whose
// extra word is copied from the base's fat pointer.
704 fn trans_field<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
707 -> DatumBlock<'blk, 'tcx, Expr> where
708 F: FnOnce(&'blk ty::ctxt<'tcx>, &[ty::field<'tcx>]) -> uint,
711 let _icx = push_ctxt("trans_rec_field");
713 let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, base, "field"));
714 let bare_ty = ty::unopen_type(base_datum.ty);
715 let repr = adt::represent_type(bcx.ccx(), bare_ty);
716 with_field_tys(bcx.tcx(), bare_ty, None, move |discr, field_tys| {
717 let ix = get_idx(bcx.tcx(), field_tys);
718 let d = base_datum.get_element(
721 |srcval| adt::trans_field_ptr(bcx, &*repr, srcval, discr, ix));
723 if type_is_sized(bcx.tcx(), d.ty) {
724 DatumBlock { datum: d.to_expr_datum(), bcx: bcx }
726 let scratch = rvalue_scratch_datum(bcx, ty::mk_open(bcx.tcx(), d.ty), "");
727 Store(bcx, d.val, get_dataptr(bcx, scratch.val));
728 let info = Load(bcx, get_len(bcx, base_datum.val));
729 Store(bcx, info, get_len(bcx, scratch.val));
731 DatumBlock::new(bcx, scratch.to_expr_datum())
738 /// Translates `base.field`.
// Resolves the field name to an index with `field_idx_strict`, then defers
// to the shared `trans_field` helper.
739 fn trans_rec_field<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
742 -> DatumBlock<'blk, 'tcx, Expr> {
743 trans_field(bcx, base, |tcx, field_tys| ty::field_idx_strict(tcx, field.name, field_tys))
746 /// Translates `base.<idx>`.
// Tuple fields are already positional, so the index closure ignores its args.
747 fn trans_rec_tup_field<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
750 -> DatumBlock<'blk, 'tcx, Expr> {
751 trans_field(bcx, base, |_, _| idx)
// Two paths: an overloaded `Index` impl (call the user's index() method,
// yielding a `&T` we then treat as an lvalue), or built-in indexing with a
// bounds check against the vector length. Elided listing: several match
// headers, arms, and braces are not visible here.
754 fn trans_index<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
755 index_expr: &ast::Expr,
758 method_call: MethodCall)
759 -> DatumBlock<'blk, 'tcx, Expr> {
760 //! Translates `base[idx]`.
762 let _icx = push_ctxt("trans_index");
766 let index_expr_debug_loc = index_expr.debug_loc();
768 // Check for overloaded index.
769 let method_ty = ccx.tcx()
773 .map(|method| method.ty);
774 let elt_datum = match method_ty {
776 let method_ty = monomorphize_type(bcx, method_ty);
778 let base_datum = unpack_datum!(bcx, trans(bcx, base));
780 // Translate index expression.
781 let ix_datum = unpack_datum!(bcx, trans(bcx, idx));
783 let ref_ty = // invoked methods have LB regions instantiated:
784 ty::no_late_bound_regions(
785 bcx.tcx(), &ty::ty_fn_ret(method_ty)).unwrap().unwrap();
786 let elt_ty = match ty::deref(ref_ty, true) {
788 bcx.tcx().sess.span_bug(index_expr.span,
789 "index method didn't return a \
790 dereferenceable type?!")
792 Some(elt_tm) => elt_tm.ty,
795 // Overloaded. Evaluate `trans_overloaded_op`, which will
796 // invoke the user's index() method, which basically yields
797 // a `&T` pointer. We can then proceed down the normal
798 // path (below) to dereference that `&T`.
799 let scratch = rvalue_scratch_datum(bcx, ref_ty, "overloaded_index_elt");
801 trans_overloaded_op(bcx,
805 vec![(ix_datum, idx.id)],
806 Some(SaveIn(scratch.val)),
808 let datum = scratch.to_expr_datum();
809 if type_is_sized(bcx.tcx(), elt_ty) {
810 Datum::new(datum.to_llscalarish(bcx), elt_ty, LvalueExpr)
812 Datum::new(datum.val, ty::mk_open(bcx.tcx(), elt_ty), LvalueExpr)
// Built-in indexing path: lvalue base + integer index, bounds-checked.
816 let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx,
820 // Translate index expression and cast to a suitable LLVM integer.
821 // Rust is less strict than LLVM in this regard.
822 let ix_datum = unpack_datum!(bcx, trans(bcx, idx));
823 let ix_val = ix_datum.to_llscalarish(bcx);
824 let ix_size = machine::llbitsize_of_real(bcx.ccx(),
826 let int_size = machine::llbitsize_of_real(bcx.ccx(),
// Widen (sign- or zero-extend by signedness) or truncate the index to the
// platform integer width before GEP.
829 if ix_size < int_size {
830 if ty::type_is_signed(expr_ty(bcx, idx)) {
831 SExt(bcx, ix_val, ccx.int_type())
832 } else { ZExt(bcx, ix_val, ccx.int_type()) }
833 } else if ix_size > int_size {
834 Trunc(bcx, ix_val, ccx.int_type())
842 ty::sequence_element_type(bcx.tcx(),
845 let (base, len) = base_datum.get_vec_base_and_len(bcx);
847 debug!("trans_index: base {}", bcx.val_to_string(base));
848 debug!("trans_index: len {}", bcx.val_to_string(len));
// Emit the bounds check, marked unlikely via llvm.expect, and branch to the
// failure handler when out of range.
850 let bounds_check = ICmp(bcx,
854 index_expr_debug_loc);
855 let expect = ccx.get_intrinsic(&("llvm.expect.i1"));
856 let expected = Call(bcx,
858 &[bounds_check, C_bool(ccx, false)],
860 index_expr_debug_loc);
861 bcx = with_cond(bcx, expected, |bcx| {
862 controlflow::trans_fail_bounds_check(bcx,
863 expr_info(index_expr),
867 let elt = InBoundsGEP(bcx, base, &[ix_val]);
868 let elt = PointerCast(bcx, elt, vt.llunit_ty.ptr_to());
869 Datum::new(elt, vt.unit_ty, LvalueExpr)
873 DatumBlock::new(bcx, elt_datum)
// Dispatches on what a path resolves to: fn-like items become fn datums,
// statics become lvalue pointers (local via get_item_val, external via
// get_extern_const), consts are an ICE here (handled earlier), and the
// fall-through (elided arm) handles local variables/upvars.
876 fn trans_def<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
877 ref_expr: &ast::Expr,
879 -> DatumBlock<'blk, 'tcx, Expr> {
880 //! Translates a reference to a path.
882 let _icx = push_ctxt("trans_def_lvalue");
884 def::DefFn(..) | def::DefStaticMethod(..) | def::DefMethod(..) |
885 def::DefStruct(_) | def::DefVariant(..) => {
886 let datum = trans_def_fn_unadjusted(bcx.ccx(), ref_expr, def,
887 bcx.fcx.param_substs);
888 DatumBlock::new(bcx, datum.to_expr_datum())
890 def::DefStatic(did, _) => {
891 // There are two things that may happen here:
892 //  1) If the static item is defined in this crate, it will be
893 //     translated using `get_item_val`, and we return a pointer to
895 //  2) If the static item is defined in another crate then we add
896 //     (or reuse) a declaration of an external global, and return a
898 let const_ty = expr_ty(bcx, ref_expr);
900 // For external constants, we don't inline.
901 let val = if did.krate == ast::LOCAL_CRATE {
904 // The LLVM global has the type of its initializer,
905 // which may not be equal to the enum's type for
907 let val = base::get_item_val(bcx.ccx(), did.node);
908 let pty = type_of::type_of(bcx.ccx(), const_ty).ptr_to();
909 PointerCast(bcx, val, pty)
912 base::get_extern_const(bcx.ccx(), did, const_ty)
914 DatumBlock::new(bcx, Datum::new(val, const_ty, LvalueExpr))
916 def::DefConst(_) => {
917 bcx.sess().span_bug(ref_expr.span,
918 "constant expression should not reach expr::trans_def")
921 DatumBlock::new(bcx, trans_local_var(bcx, def).to_expr_datum())
// Statement-mode expressions: control flow (break/continue/return/while/
// loop), assignments, and inline asm. Returns the continuation block.
// Elided listing: the match header and several arm braces are not visible.
926 fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
928 -> Block<'blk, 'tcx> {
930 let _icx = push_ctxt("trans_rvalue_stmt");
// Bail out early when the current block is already unreachable.
932 if bcx.unreachable.get() {
936 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
939 ast::ExprParen(ref e) => {
940 trans_into(bcx, &**e, Ignore)
942 ast::ExprBreak(label_opt) => {
943 controlflow::trans_break(bcx, expr, label_opt)
945 ast::ExprAgain(label_opt) => {
946 controlflow::trans_cont(bcx, expr, label_opt)
948 ast::ExprRet(ref ex) => {
949 // Check to see if the return expression itself is reachable.
950 // This can occur when the inner expression contains a return
951 let reachable = if let Some(ref cfg) = bcx.fcx.cfg {
952 cfg.node_is_reachable(expr.id)
958 controlflow::trans_ret(bcx, expr, ex.as_ref().map(|e| &**e))
960 // If it's not reachable, just translate the inner expression
961 // directly. This avoids having to manage a return slot when
962 // it won't actually be used anyway.
963 if let &Some(ref x) = ex {
964 bcx = trans_into(bcx, &**x, Ignore);
966 // Mark the end of the block as unreachable. Once we get to
967 // a return expression, there's no more we should be doing
973 ast::ExprWhile(ref cond, ref body, _) => {
974 controlflow::trans_while(bcx, expr, &**cond, &**body)
976 ast::ExprLoop(ref body, _) => {
977 controlflow::trans_loop(bcx, expr, &**body)
979 ast::ExprAssign(ref dst, ref src) => {
980 let src_datum = unpack_datum!(bcx, trans(bcx, &**src));
981 let dst_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, &**dst, "assign"));
// When the destination type has a destructor, force the source into an
// rvalue and drop the old destination value before overwriting it.
983 if type_needs_drop(bcx.tcx(), dst_datum.ty) {
984 // If there are destructors involved, make sure we
985 // are copying from an rvalue, since that cannot possible
986 // alias an lvalue. We are concerned about code like:
994 // where e.g. a : Option<Foo> and a.b :
995 // Option<Foo>. In that case, freeing `a` before the
996 // assignment may also free `a.b`!
998 // We could avoid this intermediary with some analysis
999 // to determine whether `dst` may possibly own `src`.
1000 debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
1001 let src_datum = unpack_datum!(
1002 bcx, src_datum.to_rvalue_datum(bcx, "ExprAssign"));
1003 bcx = glue::drop_ty(bcx,
1007 src_datum.store_to(bcx, dst_datum.val)
1009 src_datum.store_to(bcx, dst_datum.val)
1012 ast::ExprAssignOp(op, ref dst, ref src) => {
1013 trans_assign_op(bcx, expr, op, &**dst, &**src)
1015 ast::ExprInlineAsm(ref a) => {
1016 asm::trans_inline_asm(bcx, a)
1019 bcx.tcx().sess.span_bug(
1021 &format!("trans_rvalue_stmt_unadjusted reached \
1022 fall-through case: {:?}",
// Translates an rvalue expression in DPS ("destination-passing style") mode:
// the result is written directly into `dest` instead of being returned as a
// datum. Dispatches on the expression kind (match header lost in the gap).
// NOTE(review): this chunk is a discontinuous listing — embedded line numbers
// jump, so statements between them are missing. Code kept byte-for-byte.
1028 fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1031                                            -> Block<'blk, 'tcx> {
1032     let _icx = push_ctxt("trans_rvalue_dps_unadjusted");
1034     let tcx = bcx.tcx();
1036     debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
// Parenthesized expressions are transparent: translate the inner expr.
1039         ast::ExprParen(ref e) => {
1040             trans_into(bcx, &**e, dest)
1042         ast::ExprPath(_) | ast::ExprQPath(_) => {
1043             trans_def_dps_unadjusted(bcx, expr, bcx.def(expr.id), dest)
1045         ast::ExprIf(ref cond, ref thn, ref els) => {
1046             controlflow::trans_if(bcx, expr.id, &**cond, &**thn, els.as_ref().map(|e| &**e), dest)
1048         ast::ExprMatch(ref discr, ref arms, _) => {
1049             _match::trans_match(bcx, expr, &**discr, &arms[..], dest)
1051         ast::ExprBlock(ref blk) => {
1052             controlflow::trans_block(bcx, &**blk, dest)
1054         ast::ExprStruct(_, ref fields, ref base) => {
1057             base.as_ref().map(|e| &**e),
1060             node_id_type(bcx, expr.id),
// Range expressions are desugared here into the corresponding lang-item
// struct (Range/RangeFrom/RangeTo/RangeFull) by synthesizing AST fields.
1063         ast::ExprRange(ref start, ref end) => {
1064             // FIXME it is just not right that we are synthesising ast nodes in
1066             fn make_field(field_name: &str, expr: P<ast::Expr>) -> ast::Field {
1068                 ident: codemap::dummy_spanned(token::str_to_ident(field_name)),
1070                 span: codemap::DUMMY_SP,
1074             // A range just desugars into a struct.
1075             // Note that the type of the start and end may not be the same, but
1076             // they should only differ in their lifetime, which should not matter
1078             let (did, fields, ty_params) = match (start, end) {
1079                 (&Some(ref start), &Some(ref end)) => {
1081                     let fields = vec![make_field("start", start.clone()),
1082                                       make_field("end", end.clone())];
1083                     (tcx.lang_items.range_struct(), fields, vec![node_id_type(bcx, start.id)])
1085                 (&Some(ref start), &None) => {
1086                     // Desugar to RangeFrom
1087                     let fields = vec![make_field("start", start.clone())];
1088                     (tcx.lang_items.range_from_struct(), fields, vec![node_id_type(bcx, start.id)])
1090                 (&None, &Some(ref end)) => {
1091                     // Desugar to RangeTo
1092                     let fields = vec![make_field("end", end.clone())];
1093                     (tcx.lang_items.range_to_struct(), fields, vec![node_id_type(bcx, end.id)])
1096                     // Desugar to RangeFull
1097                     (tcx.lang_items.range_full_struct(), vec![], vec![])
// If the lang item is missing, type-checking should already have failed;
// reaching here without it is a compiler bug.
1101             if let Some(did) = did {
1102                 let substs = Substs::new_type(ty_params, vec![]);
1108                               ty::mk_struct(tcx, did, tcx.mk_substs(substs)),
1111                 tcx.sess.span_bug(expr.span,
1112                                   "No lang item for ranges (how did we get this far?)")
// Tuples are translated as ADTs with positional (index-numbered) fields.
1115         ast::ExprTup(ref args) => {
1116             let numbered_fields: Vec<(uint, &ast::Expr)> =
1117                 args.iter().enumerate().map(|(i, arg)| (i, &**arg)).collect();
1121                       &numbered_fields[..],
// Only string literals reach the DPS path; scalar literals are datum exprs.
1126         ast::ExprLit(ref lit) => {
1128                 ast::LitStr(ref s, _) => {
1129                     tvec::trans_lit_str(bcx, expr, (*s).clone(), dest)
1134                        .span_bug(expr.span,
1135                                  "trans_rvalue_dps_unadjusted shouldn't be \
1136                                   translating this type of literal")
1140         ast::ExprVec(..) | ast::ExprRepeat(..) => {
1141             tvec::trans_fixed_vstore(bcx, expr, dest)
1143         ast::ExprClosure(_, ref decl, ref body) => {
1144             let dest = match dest {
1145                 SaveIn(lldest) => closure::Dest::SaveIn(bcx, lldest),
1146                 Ignore => closure::Dest::Ignore(bcx.ccx())
1148             closure::trans_closure_expr(dest, &**decl, &**body, expr.id, bcx.fcx.param_substs)
// Calls: overloaded call operators go through the method-call machinery,
// plain calls through callee::trans_call.
1151         ast::ExprCall(ref f, ref args) => {
1152             if bcx.tcx().is_method_call(expr.id) {
1153                 trans_overloaded_call(bcx,
1159                 callee::trans_call(bcx,
1162                                    callee::ArgExprs(&args[..]),
1166         ast::ExprMethodCall(_, _, ref args) => {
1167             callee::trans_method_call(bcx,
1170                                       callee::ArgExprs(&args[..]),
1173         ast::ExprBinary(op, ref lhs, ref rhs) => {
1174             // if not overloaded, would be RvalueDatumExpr
1175             let lhs = unpack_datum!(bcx, trans(bcx, &**lhs));
1176             let rhs_datum = unpack_datum!(bcx, trans(bcx, &**rhs));
1177             trans_overloaded_op(bcx, expr, MethodCall::expr(expr.id), lhs,
1178                                 vec![(rhs_datum, rhs.id)], Some(dest),
1179                                 !ast_util::is_by_value_binop(op.node)).bcx
1181         ast::ExprUnary(op, ref subexpr) => {
1182             // if not overloaded, would be RvalueDatumExpr
1183             let arg = unpack_datum!(bcx, trans(bcx, &**subexpr));
1184             trans_overloaded_op(bcx, expr, MethodCall::expr(expr.id),
1185                                 arg, Vec::new(), Some(dest), !ast_util::is_by_value_unop(op)).bcx
1187         ast::ExprIndex(ref base, ref idx) => {
1188             // if not overloaded, would be RvalueDatumExpr
1189             let base = unpack_datum!(bcx, trans(bcx, &**base));
1190             let idx_datum = unpack_datum!(bcx, trans(bcx, &**idx));
1191             trans_overloaded_op(bcx, expr, MethodCall::expr(expr.id), base,
1192                                 vec![(idx_datum, idx.id)], Some(dest), true).bcx
1194         ast::ExprCast(ref val, _) => {
1195             // DPS output mode means this is a trait cast:
1196             if ty::type_is_trait(node_id_type(bcx, expr.id)) {
1198                 bcx.tcx().object_cast_map.borrow()
1202                 let trait_ref = bcx.monomorphize(&trait_ref);
1203                 let datum = unpack_datum!(bcx, trans(bcx, &**val));
1204                 meth::trans_trait_cast(bcx, datum, expr.id,
1207                 bcx.tcx().sess.span_bug(expr.span,
1208                                         "expr_cast of non-trait");
1211         ast::ExprAssignOp(op, ref dst, ref src) => {
1212             trans_assign_op(bcx, expr, op, &**dst, &**src)
// Fall-through arm: any expr kind not handled above is a compiler bug here.
1215             bcx.tcx().sess.span_bug(
1217                 &format!("trans_rvalue_dps_unadjusted reached fall-through \
// Translates a path/def reference in DPS mode: writes the value named by
// `def` (enum variant constructor, nullary variant, unit struct, ...) into
// `dest`. Bails out early when the destination is `Ignore`.
// NOTE(review): discontinuous listing — embedded line numbers jump; the code
// lines shown are preserved byte-for-byte.
1224 fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1225                                         ref_expr: &ast::Expr,
1228                                         -> Block<'blk, 'tcx> {
1229     let _icx = push_ctxt("trans_def_dps_unadjusted");
1231     let lldest = match dest {
1232         SaveIn(lldest) => lldest,
// Nothing to write when the caller ignores the result.
1233         Ignore => { return bcx; }
1237         def::DefVariant(tid, vid, _) => {
1238             let variant_info = ty::enum_variant_with_id(bcx.tcx(), tid, vid);
// A variant with arguments referenced as a value is a constructor fn:
// store a pointer to the (generated) constructor function.
1239             if variant_info.args.len() > 0 {
1241                 let llfn = callee::trans_fn_ref(bcx.ccx(), vid,
1242                                                 ExprId(ref_expr.id),
1243                                                 bcx.fcx.param_substs).val;
1244                 Store(bcx, llfn, lldest);
// Nullary variant: just set the discriminant in place.
1248                 let ty = expr_ty(bcx, ref_expr);
1249                 let repr = adt::represent_type(bcx.ccx(), ty);
1250                 adt::trans_set_discr(bcx, &*repr, lldest,
1251                                      variant_info.disr_val);
1255         def::DefStruct(_) => {
1256             let ty = expr_ty(bcx, ref_expr);
// Structs with destructors carry a drop flag; initialize it via the discr.
1258                 ty::ty_struct(did, _) if ty::has_dtor(bcx.tcx(), did) => {
1259                     let repr = adt::represent_type(bcx.ccx(), ty);
1260                     adt::trans_set_discr(bcx, &*repr, lldest, 0);
// NOTE(review): "referened" below is a typo in a runtime string; left
// unchanged here since it is program output, not a comment.
1267             bcx.tcx().sess.span_bug(ref_expr.span, &format!(
1268                 "Non-DPS def {:?} referened by {}",
1269                 def, bcx.node_id_to_string(ref_expr.id))[]);
// Translates a reference to a function-like def (fn, tuple-struct/variant
// constructor, inherent or trait static method) into an rvalue datum holding
// the function pointer.
// NOTE(review): discontinuous listing — embedded line numbers jump; code
// lines shown are preserved byte-for-byte.
1274 pub fn trans_def_fn_unadjusted<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
1275                                          ref_expr: &ast::Expr,
1277                                          param_substs: &'tcx subst::Substs<'tcx>)
1278                                          -> Datum<'tcx, Rvalue> {
1279     let _icx = push_ctxt("trans_def_datum_unadjusted");
// Impl-side items resolve directly to a monomorphized fn reference.
1282         def::DefFn(did, _) |
1283         def::DefStruct(did) | def::DefVariant(_, did, _) |
1284         def::DefStaticMethod(did, def::FromImpl(_)) |
1285         def::DefMethod(did, _, def::FromImpl(_)) => {
1286             callee::trans_fn_ref(ccx, did, ExprId(ref_expr.id), param_substs)
// Trait-side methods need trait-method dispatch resolution first.
1288         def::DefStaticMethod(impl_did, def::FromTrait(trait_did)) |
1289         def::DefMethod(impl_did, _, def::FromTrait(trait_did)) => {
1290             meth::trans_static_method_callee(ccx, impl_did,
1291                                              trait_did, ref_expr.id,
1295             ccx.tcx().sess.span_bug(ref_expr.span, &format!(
1296                     "trans_def_fn_unadjusted invoked on: {:?} for {}",
1298                     ref_expr.repr(ccx.tcx()))[]);
// NOTE(review): discontinuous listing — embedded line numbers jump; code
// lines shown are preserved byte-for-byte.
1303 /// Translates a reference to a local variable or argument. This always results in an lvalue datum.
1304 pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1306                                    -> Datum<'tcx, Lvalue> {
1307     let _icx = push_ctxt("trans_local_var");
// Upvars (captured variables) are looked up in the fn context's upvar map.
1310         def::DefUpvar(nid, _) => {
1311             // Can't move upvars, so this is never a ZeroMemLastUse.
1312             let local_ty = node_id_type(bcx, nid);
1313             match bcx.fcx.llupvars.borrow().get(&nid) {
1314                 Some(&val) => Datum::new(val, local_ty, Lvalue),
// Missing entry is a compiler bug: resolve said this is an upvar.
1316                     bcx.sess().bug(&format!(
1317                         "trans_local_var: no llval for upvar {} found",
// Locals and arguments live in the fn context's lllocals map.
1322         def::DefLocal(nid) => {
1323             let datum = match bcx.fcx.lllocals.borrow().get(&nid) {
1326                     bcx.sess().bug(&format!(
1327                         "trans_local_var: no datum for local/arg {} found",
1331             debug!("take_local(nid={}, v={}, ty={})",
1332                    nid, bcx.val_to_string(datum.val), bcx.ty_to_string(datum.ty));
// Any other def kind reaching here is unimplemented by design.
1336             bcx.sess().unimpl(&format!(
1337                 "unsupported def type in trans_local_var: {:?}",
// NOTE(review): discontinuous listing — embedded line numbers jump; code
// lines shown are preserved byte-for-byte (doc-comment grammar fixed only).
1343 /// Helper for enumerating the field types of structs, enums, or records. The optional node ID here
1344 /// is the node ID of the path identifying the enum variant in use. If none, this cannot possibly be
1345 /// an enum variant (so, if it is and `node_id_opt` is none, this function panics).
1346 pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>,
1348                                   node_id_opt: Option<ast::NodeId>,
1351     F: FnOnce(ty::Disr, &[ty::field<'tcx>]) -> R,
// Structs: discriminant is implicitly 0; fields come straight from the def.
1354         ty::ty_struct(did, substs) => {
1355             let fields = struct_fields(tcx, did, substs);
1356             let fields = monomorphize::normalize_associated_type(tcx, &fields);
// Tuples: treated as positional fields with discriminant 0.
1360         ty::ty_tup(ref v) => {
1361             op(0, &tup_fields(&v[..])[])
1364         ty::ty_enum(_, substs) => {
1365             // We want the *variant* ID here, not the enum ID.
// Without a node id we cannot tell which variant is meant — compiler bug.
1368                     tcx.sess.bug(&format!(
1369                         "cannot get field types from the enum type {} \
1374                     let def = tcx.def_map.borrow()[node_id].clone();
1376                         def::DefVariant(enum_id, variant_id, _) => {
1377                             let variant_info = ty::enum_variant_with_id(
1378                                 tcx, enum_id, variant_id);
1380                             let fields = struct_fields(tcx, variant_id, substs);
1380                             let fields = monomorphize::normalize_associated_type(tcx, &fields);
1381                             op(variant_info.disr_val, &fields[..])
1384                             tcx.sess.bug("resolve didn't map this expr to a \
// Any non-struct/tuple/enum type has no fields to enumerate.
1393             tcx.sess.bug(&format!(
1394                 "cannot get field types from the type {}",
// Translates a struct literal `S { f: e, ..base }` into `dest`: matches the
// written fields against the struct's field list, computes which fields must
// come from the base expression, then delegates to `trans_adt`.
// NOTE(review): discontinuous listing — embedded line numbers jump; code
// lines shown are preserved byte-for-byte.
1400 fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1401                             fields: &[ast::Field],
1402                             base: Option<&ast::Expr>,
1403                             expr_span: codemap::Span,
1404                             expr_id: ast::NodeId,
1406                             dest: Dest) -> Block<'blk, 'tcx> {
1407     let _icx = push_ctxt("trans_rec");
1409     let tcx = bcx.tcx();
1410     with_field_tys(tcx, ty, Some(expr_id), |discr, field_tys| {
// need_base[i] is true until field i is seen among the explicit initializers.
1411         let mut need_base: Vec<bool> = repeat(true).take(field_tys.len()).collect();
1413         let numbered_fields = fields.iter().map(|field| {
1415                 field_tys.iter().position(|field_ty|
1416                                           field_ty.name == field.ident.node.name);
1417             let result = match opt_pos {
1419                     need_base[i] = false;
// Typeck should guarantee every written field exists on the struct.
1423                     tcx.sess.span_bug(field.span,
1424                                       "Couldn't find field in struct type")
1428         }).collect::<Vec<_>>();
1429         let optbase = match base {
1430             Some(base_expr) => {
// Fields not explicitly written are copied from the base expression.
1431                 let mut leftovers = Vec::new();
1432                 for (i, b) in need_base.iter().enumerate() {
1434                         leftovers.push((i, field_tys[i].mt.ty));
1437                 Some(StructBaseInfo {expr: base_expr,
1438                                      fields: leftovers })
// No base expression: every field must have been written explicitly.
1441                 if need_base.iter().any(|b| *b) {
1442                     tcx.sess.span_bug(expr_span, "missing fields and no base expr")
1454                   DebugLoc::At(expr_id, expr_span))
// NOTE(review): discontinuous listing — embedded line numbers jump; struct
// body shown may omit lines. Code preserved byte-for-byte (doc paren fixed).
1458 /// Information that `trans_adt` needs in order to fill in the fields
1459 /// of a struct copied from a base struct (e.g., from an expression
1460 /// like `Foo { a: b, ..base }`).
1462 /// Note that `fields` may be empty; the base expression must always be
1463 /// evaluated for side-effects.
1464 pub struct StructBaseInfo<'a, 'tcx> {
1465     /// The base expression; will be evaluated after all explicit fields.
1466     expr: &'a ast::Expr,
1467     /// The indices of fields to copy paired with their types.
1468     fields: Vec<(uint, Ty<'tcx>)>
// NOTE(review): discontinuous listing — embedded line numbers jump; code
// lines shown are preserved byte-for-byte.
1471 /// Constructs an ADT instance:
1473 /// - `fields` should be a list of field indices paired with the
1474 ///   expression to store into that field.  The initializers will be
1475 ///   evaluated in the order specified by `fields`.
1477 /// - `optbase` contains information on the base struct (if any) from
1478 ///   which remaining fields are copied; see comments on `StructBaseInfo`.
1479 pub fn trans_adt<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
1482                                  fields: &[(uint, &ast::Expr)],
1483                                  optbase: Option<StructBaseInfo<'a, 'tcx>>,
1485                                  debug_location: DebugLoc)
1486                                  -> Block<'blk, 'tcx> {
1487     let _icx = push_ctxt("trans_adt");
1489     let repr = adt::represent_type(bcx.ccx(), ty);
1491     debug_location.apply(bcx.fcx);
1493     // If we don't care about the result, just make a
1494     // temporary stack slot
1495     let addr = match dest {
1497         Ignore => alloc_ty(bcx, ty, "temp"),
1500     // This scope holds intermediates that must be cleaned should
1501     // panic occur before the ADT as a whole is ready.
1502     let custom_cleanup_scope = fcx.push_custom_cleanup_scope();
1504     // First we trans the base, if we have one, to the dest
1505     if let Some(base) = optbase {
// Base-struct syntax only makes sense for structs (discriminant 0).
1506         assert_eq!(discr, 0);
1508         match ty::expr_kind(bcx.tcx(), &*base.expr) {
// Cheap path: when nothing needs dropping, translate the whole base
// directly into the destination and let explicit fields overwrite it.
1509             ty::RvalueDpsExpr | ty::RvalueDatumExpr if !type_needs_drop(bcx.tcx(), ty) => {
1510                 bcx = trans_into(bcx, &*base.expr, SaveIn(addr));
1512             ty::RvalueStmtExpr => bcx.tcx().sess.bug("unexpected expr kind for struct base expr"),
// General path: copy only the leftover fields element by element.
1514                 let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, &*base.expr, "base"));
1515                 for &(i, t) in &base.fields {
1516                     let datum = base_datum.get_element(
1517                             bcx, t, |srcval| adt::trans_field_ptr(bcx, &*repr, srcval, discr, i));
1518                     assert!(type_is_sized(bcx.tcx(), datum.ty));
1519                     let dest = adt::trans_field_ptr(bcx, &*repr, addr, discr, i);
1520                     bcx = datum.store_to(bcx, dest);
1526     debug_location.apply(bcx.fcx);
1528     if ty::type_is_simd(bcx.tcx(), ty) {
1529         // This is the constructor of a SIMD type, such types are
1530         // always primitive machine types and so do not have a
1531         // destructor or require any clean-up.
1532         let llty = type_of::type_of(bcx.ccx(), ty);
1534         // keep a vector as a register, and running through the field
1535         // `insertelement`ing them directly into that register
1536         // (i.e. avoid GEPi and `store`s to an alloca) .
1537         let mut vec_val = C_undef(llty);
1539         for &(i, ref e) in fields {
1540             let block_datum = trans(bcx, &**e);
1541             bcx = block_datum.bcx;
1542             let position = C_uint(bcx.ccx(), i);
1543             let value = block_datum.datum.to_llscalarish(bcx);
1544             vec_val = InsertElement(bcx, vec_val, value, position);
1546         Store(bcx, vec_val, addr);
1548         // Now, we just overwrite the fields we've explicitly specified
1549         for &(i, ref e) in fields {
1550             let dest = adt::trans_field_ptr(bcx, &*repr, addr, discr, i);
1551             let e_ty = expr_ty_adjusted(bcx, &**e);
1552             bcx = trans_into(bcx, &**e, SaveIn(dest));
// Schedule cleanup for this field in case a later initializer panics.
1553             let scope = cleanup::CustomScope(custom_cleanup_scope);
1554             fcx.schedule_lifetime_end(scope, dest);
1555             fcx.schedule_drop_mem(scope, dest, e_ty);
1559         adt::trans_set_discr(bcx, &*repr, addr, discr);
// All fields initialized: the per-field cleanups are no longer needed.
1561     fcx.pop_custom_cleanup_scope(custom_cleanup_scope);
1563     // If we don't care about the result drop the temporary we made
1567         bcx = glue::drop_ty(bcx, addr, ty, debug_location);
1568         base::call_lifetime_end(bcx, addr);
// Translates a scalar (non-string) literal into an immediate rvalue datum
// via the constant folder.
// NOTE(review): discontinuous listing — embedded line numbers jump.
1575 fn trans_immediate_lit<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1578                                    -> DatumBlock<'blk, 'tcx, Expr> {
1579     // must not be a string constant, that is a RvalueDpsExpr
1580     let _icx = push_ctxt("trans_immediate_lit");
1581     let ty = expr_ty(bcx, expr);
1582     let v = consts::const_lit(bcx.ccx(), expr, lit);
1583     immediate_rvalue_bcx(bcx, v, ty).to_expr_datumblock()
// Translates a unary expression (`!`, `-`, `box`, `*`) to a datum. Overloaded
// operators other than deref never reach this path (they are DPS exprs).
// NOTE(review): discontinuous listing — embedded line numbers jump; code
// lines shown are preserved byte-for-byte (one comment typo fixed).
1586 fn trans_unary<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1589                            sub_expr: &ast::Expr)
1590                            -> DatumBlock<'blk, 'tcx, Expr> {
1591     let ccx = bcx.ccx();
1593     let _icx = push_ctxt("trans_unary_datum");
1595     let method_call = MethodCall::expr(expr.id);
1597     // The only overloaded operator that is translated to a datum
1598     // is an overloaded deref, since it always yields a `&T`.
1599     // Otherwise, we should be in the RvalueDpsExpr path.
1601         op == ast::UnDeref ||
1602         !ccx.tcx().method_map.borrow().contains_key(&method_call));
1604     let un_ty = expr_ty(bcx, expr);
1606     let debug_loc = expr.debug_loc();
// Logical/bitwise not.
1610             let datum = unpack_datum!(bcx, trans(bcx, sub_expr));
1611             let llresult = Not(bcx, datum.to_llscalarish(bcx), debug_loc);
1612             immediate_rvalue_bcx(bcx, llresult, un_ty).to_expr_datumblock()
// Negation: FNeg for floats, Neg for integers.
1615             let datum = unpack_datum!(bcx, trans(bcx, sub_expr));
1616             let val = datum.to_llscalarish(bcx);
1618                 if ty::type_is_fp(un_ty) {
1619                     FNeg(bcx, val, debug_loc)
1621                     Neg(bcx, val, debug_loc)
1624             immediate_rvalue_bcx(bcx, llneg, un_ty).to_expr_datumblock()
// `box e`: heap allocation handled by trans_uniq_expr.
1627             trans_uniq_expr(bcx, expr, un_ty, sub_expr, expr_ty(bcx, sub_expr))
// `*e`: possibly overloaded deref, handled by deref_once.
1630             let datum = unpack_datum!(bcx, trans(bcx, sub_expr));
1631             deref_once(bcx, expr, datum, method_call)
// Translates `box contents`: allocates on the exchange heap, translates the
// contents into the allocation, and yields the box pointer as an rvalue.
// NOTE(review): discontinuous listing — embedded line numbers jump; code
// lines shown are preserved byte-for-byte.
1636 fn trans_uniq_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1637                                box_expr: &ast::Expr,
1639                                contents: &ast::Expr,
1640                                contents_ty: Ty<'tcx>)
1641                                -> DatumBlock<'blk, 'tcx, Expr> {
1642     let _icx = push_ctxt("trans_uniq_expr");
// Boxing an unsized value is not supported here.
1644     assert!(type_is_sized(bcx.tcx(), contents_ty));
1645     let llty = type_of::type_of(bcx.ccx(), contents_ty);
1646     let size = llsize_of(bcx.ccx(), llty);
1647     let align = C_uint(bcx.ccx(), type_of::align_of(bcx.ccx(), contents_ty));
1648     let llty_ptr = llty.ptr_to();
1649     let Result { bcx, val } = malloc_raw_dyn(bcx,
1654                                              box_expr.debug_loc());
1655     // Unique boxes do not allocate for zero-size types. The standard library
1656     // may assume that `free` is never called on the pointer returned for
1657     // `Box<ZeroSizeType>`.
1658     let bcx = if llsize_of_alloc(bcx.ccx(), llty) == 0 {
1659         trans_into(bcx, contents, SaveIn(val))
// Non-zero-size: schedule a free of the raw allocation so it is released
// if translating the contents panics; cancel once contents are in place.
1661         let custom_cleanup_scope = fcx.push_custom_cleanup_scope();
1662         fcx.schedule_free_value(cleanup::CustomScope(custom_cleanup_scope),
1663                                 val, cleanup::HeapExchange, contents_ty);
1664         let bcx = trans_into(bcx, contents, SaveIn(val));
1665         fcx.pop_custom_cleanup_scope(custom_cleanup_scope);
1668     immediate_rvalue_bcx(bcx, val, box_ty).to_expr_datumblock()
// Builds a fat pointer (`&T` where T is unsized) to an opened-DST lvalue by
// copying the pointer/extra pair into a scratch datum of the closed type.
// NOTE(review): discontinuous listing — embedded line numbers jump.
1671 fn ref_fat_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1672                            lval: Datum<'tcx, Lvalue>)
1673                            -> DatumBlock<'blk, 'tcx, Expr> {
1674     let dest_ty = ty::close_type(bcx.tcx(), lval.ty);
1675     let scratch = rvalue_scratch_datum(bcx, dest_ty, "__fat_ptr");
1676     memcpy_ty(bcx, scratch.val, lval.val, scratch.ty);
1678     DatumBlock::new(bcx, scratch.to_expr_datum())
// Translates `&subexpr`: forces the subexpression into an lvalue and yields
// its address, as a thin or fat pointer depending on sizedness.
// NOTE(review): discontinuous listing — embedded line numbers jump.
1681 fn trans_addr_of<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1683                              subexpr: &ast::Expr)
1684                              -> DatumBlock<'blk, 'tcx, Expr> {
1685     let _icx = push_ctxt("trans_addr_of");
1687     let sub_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, subexpr, "addr_of"));
1688     match sub_datum.ty.sty {
1690             // Opened DST value, close to a fat pointer
1691             ref_fat_ptr(bcx, sub_datum)
1694             // Sized value, ref to a thin pointer
1695             let ty = expr_ty(bcx, expr);
1696             immediate_rvalue_bcx(bcx, sub_datum.val, ty).to_expr_datumblock()
// Emits the LLVM instruction(s) for a non-lazy, non-overloaded binary op on
// already-translated scalar (or SIMD) operands, choosing the float/signed/
// unsigned flavor from the operand type.
// NOTE(review): discontinuous listing — embedded line numbers jump; code
// lines shown are preserved byte-for-byte.
1701 // Important to get types for both lhs and rhs, because one might be _|_
1702 // and the other not.
1703 fn trans_eager_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1704                                  binop_expr: &ast::Expr,
1711                                  -> DatumBlock<'blk, 'tcx, Expr> {
1712     let _icx = push_ctxt("trans_eager_binop");
1714     let tcx = bcx.tcx();
1715     let is_simd = ty::type_is_simd(tcx, lhs_t);
// For SIMD vectors, classify by the element type, not the vector type.
1716     let intype = if is_simd {
1717         ty::simd_type(tcx, lhs_t)
1721     let is_float = ty::type_is_fp(intype);
1722     let is_signed = ty::type_is_signed(intype);
// LLVM requires the shift amount to have the same width as the shiftee.
1724     let rhs = base::cast_shift_expr_rhs(bcx, op, lhs, rhs);
1726     let binop_debug_loc = binop_expr.debug_loc();
1729     let val = match op.node {
1732                 FAdd(bcx, lhs, rhs, binop_debug_loc)
1734                 Add(bcx, lhs, rhs, binop_debug_loc)
1739                 FSub(bcx, lhs, rhs, binop_debug_loc)
1741                 Sub(bcx, lhs, rhs, binop_debug_loc)
1746                 FMul(bcx, lhs, rhs, binop_debug_loc)
1748                 Mul(bcx, lhs, rhs, binop_debug_loc)
1753                 FDiv(bcx, lhs, rhs, binop_debug_loc)
1755                 // Only zero-check integers; fp /0 is NaN
1756                 bcx = base::fail_if_zero_or_overflows(bcx,
1757                                                       expr_info(binop_expr),
1763                     SDiv(bcx, lhs, rhs, binop_debug_loc)
1765                     UDiv(bcx, lhs, rhs, binop_debug_loc)
1771                 FRem(bcx, lhs, rhs, binop_debug_loc)
1773                 // Only zero-check integers; fp %0 is NaN
1774                 bcx = base::fail_if_zero_or_overflows(bcx,
1775                                                       expr_info(binop_expr),
1776                                                       op, lhs, rhs, rhs_t);
1778                     SRem(bcx, lhs, rhs, binop_debug_loc)
1780                     URem(bcx, lhs, rhs, binop_debug_loc)
1784         ast::BiBitOr => Or(bcx, lhs, rhs, binop_debug_loc),
1785         ast::BiBitAnd => And(bcx, lhs, rhs, binop_debug_loc),
1786         ast::BiBitXor => Xor(bcx, lhs, rhs, binop_debug_loc),
1787         ast::BiShl => Shl(bcx, lhs, rhs, binop_debug_loc),
// Right shift is arithmetic for signed operands, logical otherwise.
1790                 AShr(bcx, lhs, rhs, binop_debug_loc)
1792                 LShr(bcx, lhs, rhs, binop_debug_loc)
1795         ast::BiEq | ast::BiNe | ast::BiLt | ast::BiGe | ast::BiLe | ast::BiGt => {
1797                 base::compare_simd_types(bcx, lhs, rhs, intype, op.node, binop_debug_loc)
1799                 base::compare_scalar_types(bcx, lhs, rhs, intype, op.node, binop_debug_loc)
// `&&`/`||` are handled by trans_lazy_binop, never here.
1803             bcx.tcx().sess.span_bug(binop_expr.span, "unexpected binop");
1807     immediate_rvalue_bcx(bcx, val, binop_ty).to_expr_datumblock()
// Discriminates `&&` from `||` for trans_lazy_binop.
// NOTE(review): the listing omits the enum's variants (embedded line numbers
// jump from 1811 to 1816); presumably `lazy_and` / `lazy_or` — confirm
// against the full source.
1810 // refinement types would obviate the need for this
1811 enum lazy_binop_ty {
// Translates short-circuiting `&&`/`||`: evaluates the LHS, branches around
// the RHS block, and joins the two values with an i1 phi.
// NOTE(review): discontinuous listing — embedded line numbers jump; code
// lines shown are preserved byte-for-byte.
1816 fn trans_lazy_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1817                                 binop_expr: &ast::Expr,
1821                                 -> DatumBlock<'blk, 'tcx, Expr> {
1822     let _icx = push_ctxt("trans_lazy_binop");
1823     let binop_ty = expr_ty(bcx, binop_expr);
1826     let DatumBlock {bcx: past_lhs, datum: lhs} = trans(bcx, a);
1827     let lhs = lhs.to_llscalarish(past_lhs);
// If the LHS diverged there is nothing more to emit.
1829     if past_lhs.unreachable.get() {
1830         return immediate_rvalue_bcx(past_lhs, lhs, binop_ty).to_expr_datumblock();
1833     let join = fcx.new_id_block("join", binop_expr.id);
1834     let before_rhs = fcx.new_id_block("before_rhs", b.id);
// `&&` evaluates the RHS when the LHS is true; `||` when it is false.
1837         lazy_and => CondBr(past_lhs, lhs, before_rhs.llbb, join.llbb, DebugLoc::None),
1838         lazy_or => CondBr(past_lhs, lhs, join.llbb, before_rhs.llbb, DebugLoc::None)
1841     let DatumBlock {bcx: past_rhs, datum: rhs} = trans(before_rhs, b);
1842     let rhs = rhs.to_llscalarish(past_rhs);
// RHS diverged: only the short-circuit edge reaches the join block, so the
// result is just the LHS value.
1844     if past_rhs.unreachable.get() {
1845         return immediate_rvalue_bcx(join, lhs, binop_ty).to_expr_datumblock();
1848     Br(past_rhs, join.llbb, DebugLoc::None);
1849     let phi = Phi(join, Type::i1(bcx.ccx()), &[lhs, rhs],
1850                   &[past_lhs.llbb, past_rhs.llbb]);
1852     return immediate_rvalue_bcx(join, phi, binop_ty).to_expr_datumblock();
// Translates a non-overloaded binary expression: routes `&&`/`||` to the
// lazy path and everything else through trans_eager_binop on scalar values.
// NOTE(review): discontinuous listing — embedded line numbers jump; code
// lines shown are preserved byte-for-byte.
1855 fn trans_binary<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1860                             -> DatumBlock<'blk, 'tcx, Expr> {
1861     let _icx = push_ctxt("trans_binary");
1862     let ccx = bcx.ccx();
1864     // if overloaded, would be RvalueDpsExpr
1865     assert!(!ccx.tcx().method_map.borrow().contains_key(&MethodCall::expr(expr.id)));
1869             trans_lazy_binop(bcx, expr, lazy_and, lhs, rhs)
1872             trans_lazy_binop(bcx, expr, lazy_or, lhs, rhs)
// Eager path: evaluate both operands to scalars, then emit the op.
1876             let lhs_datum = unpack_datum!(bcx, trans(bcx, lhs));
1877             let rhs_datum = unpack_datum!(bcx, trans(bcx, rhs));
1878             let binop_ty = expr_ty(bcx, expr);
1880             debug!("trans_binary (expr {}): lhs_datum={}",
1882                    lhs_datum.to_string(ccx));
1883             let lhs_ty = lhs_datum.ty;
1884             let lhs = lhs_datum.to_llscalarish(bcx);
1886             debug!("trans_binary (expr {}): rhs_datum={}",
1888                    rhs_datum.to_string(ccx));
1889             let rhs_ty = rhs_datum.ty;
1890             let rhs = rhs_datum.to_llscalarish(bcx);
1891             trans_eager_binop(bcx, expr, binop_ty, op,
1892                               lhs_ty, lhs, rhs_ty, rhs)
// Translates an overloaded operator (binary, unary, index, deref) as a
// method call: looks up the method's type and dispatches through the common
// call-translation machinery with ArgOverloadedOp argument handling.
// NOTE(review): discontinuous listing — embedded line numbers jump.
1897 fn trans_overloaded_op<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
1899                                    method_call: MethodCall,
1900                                    lhs: Datum<'tcx, Expr>,
1901                                    rhs: Vec<(Datum<'tcx, Expr>, ast::NodeId)>,
1904                                    -> Result<'blk, 'tcx> {
1905     let method_ty = (*bcx.tcx().method_map.borrow())[method_call].ty;
1906     callee::trans_call_inner(bcx,
1908                              monomorphize_type(bcx, method_ty),
1909                              |bcx, arg_cleanup_scope| {
1910                                 meth::trans_method_callee(bcx,
1915                              callee::ArgOverloadedOp(lhs, rhs, autoref),
// Translates `callee(args...)` where the call operator itself is overloaded
// (Fn/FnMut/FnOnce): the callee expression becomes the method receiver and
// is prepended to the argument list.
// NOTE(review): discontinuous listing — embedded line numbers jump.
1919 fn trans_overloaded_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
1921                                          callee: &'a ast::Expr,
1922                                          args: &'a [P<ast::Expr>],
1924                                          -> Block<'blk, 'tcx> {
1925     let method_call = MethodCall::expr(expr.id);
1926     let method_type = (*bcx.tcx()
1928                            .borrow())[method_call]
// Receiver first, then the call's explicit arguments.
1930     let mut all_args = vec!(callee);
1931     all_args.extend(args.iter().map(|e| &**e));
1933     callee::trans_call_inner(bcx,
1935                              monomorphize_type(bcx,
1937                              |bcx, arg_cleanup_scope| {
1938                                 meth::trans_method_callee(
1944                              callee::ArgOverloadedCall(all_args),
// Emits an integer-to-integer cast: bitcast when widths match, truncate when
// narrowing, sign- or zero-extend when widening (chosen by the source's
// signedness — the signedness parameter line is elided in this listing).
// NOTE(review): discontinuous listing — embedded line numbers jump.
1949 fn int_cast(bcx: Block,
1955     let _icx = push_ctxt("int_cast");
1956     let srcsz = llsrctype.int_width();
1957     let dstsz = lldsttype.int_width();
1958     return if dstsz == srcsz {
1959         BitCast(bcx, llsrc, lldsttype)
1960     } else if srcsz > dstsz {
1961         TruncOrBitCast(bcx, llsrc, lldsttype)
1963         SExtOrBitCast(bcx, llsrc, lldsttype)
1965         ZExtOrBitCast(bcx, llsrc, lldsttype)
// Emits a float-to-float cast: extend when widening, truncate when
// narrowing (the equal-width/no-op arm is elided in this listing).
// NOTE(review): discontinuous listing — embedded line numbers jump.
1969 fn float_cast(bcx: Block,
1974     let _icx = push_ctxt("float_cast");
1975     let srcsz = llsrctype.float_width();
1976     let dstsz = lldsttype.float_width();
1977     return if dstsz > srcsz {
1978         FPExt(bcx, llsrc, lldsttype)
1979     } else if srcsz > dstsz {
1980         FPTrunc(bcx, llsrc, lldsttype)
// Classification of types for `as` casts, consumed by cast_type_kind and
// trans_imm_cast below.
// NOTE(review): the listing omits the variants (line numbers jump 1985→1993);
// usage below shows at least cast_integral, cast_float, cast_pointer,
// cast_enum — confirm the full set against the original source.
1984 #[derive(Copy, PartialEq, Debug)]
1985 pub enum cast_kind {
// Maps a type to its cast_kind category for `as`-cast lowering.
// NOTE(review): discontinuous listing — embedded line numbers jump (e.g. the
// thin-vs-fat pointer arm bodies are elided).
1993 pub fn cast_type_kind<'tcx>(tcx: &ty::ctxt<'tcx>, t: Ty<'tcx>) -> cast_kind {
1995         ty::ty_char => cast_integral,
1996         ty::ty_float(..) => cast_float,
// References and raw pointers: only thin (sized-pointee) pointers are
// castable as plain pointers.
1997         ty::ty_rptr(_, mt) | ty::ty_ptr(mt) => {
1998             if type_is_sized(tcx, mt.ty) {
2004         ty::ty_bare_fn(..) => cast_pointer,
2005         ty::ty_int(..) => cast_integral,
2006         ty::ty_uint(..) => cast_integral,
2007         ty::ty_bool => cast_integral,
2008         ty::ty_enum(..) => cast_enum,
// Returns true when a cast between the two types needs no code. Compares the
// (auto-)dereferenced pointee types; the comparison body is elided in this
// listing (line numbers jump 2015→2022).
2013 pub fn cast_is_noop<'tcx>(t_in: Ty<'tcx>, t_out: Ty<'tcx>) -> bool {
2014     match (ty::deref(t_in, true), ty::deref(t_out, true)) {
2015         (Some(ty::mt{ ty: t_in, .. }), Some(ty::mt{ ty: t_out, .. })) => {
// Translates an immediate (scalar) `as` cast: classifies source and target
// via cast_type_kind and emits the matching LLVM conversion.
// NOTE(review): discontinuous listing — embedded line numbers jump; code
// lines shown are preserved byte-for-byte.
2022 fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2025                               -> DatumBlock<'blk, 'tcx, Expr> {
2026     let _icx = push_ctxt("trans_cast");
2028     let ccx = bcx.ccx();
2030     let t_in = expr_ty(bcx, expr);
2031     let t_out = node_id_type(bcx, id);
2032     let k_in = cast_type_kind(bcx.tcx(), t_in);
2033     let k_out = cast_type_kind(bcx.tcx(), t_out);
// s_in decides sign- vs zero-extension for integral widenings below.
2034     let s_in = k_in == cast_integral && ty::type_is_signed(t_in);
2035     let ll_t_in = type_of::arg_type_of(ccx, t_in);
2036     let ll_t_out = type_of::arg_type_of(ccx, t_out);
2038     // Convert the value to be cast into a ValueRef, either by-ref or
2039     // by-value as appropriate given its type:
2040     let mut datum = unpack_datum!(bcx, trans(bcx, expr));
// No-op casts (same representation) pass the datum through unchanged.
2042     if cast_is_noop(datum.ty, t_out) {
2044         return DatumBlock::new(bcx, datum);
2047     let newval = match (k_in, k_out) {
2048         (cast_integral, cast_integral) => {
2049             let llexpr = datum.to_llscalarish(bcx);
2050             int_cast(bcx, ll_t_out, ll_t_in, llexpr, s_in)
2052         (cast_float, cast_float) => {
2053             let llexpr = datum.to_llscalarish(bcx);
2054             float_cast(bcx, ll_t_out, ll_t_in, llexpr)
2056         (cast_integral, cast_float) => {
2057             let llexpr = datum.to_llscalarish(bcx);
2059                 SIToFP(bcx, llexpr, ll_t_out)
2060             } else { UIToFP(bcx, llexpr, ll_t_out) }
2062         (cast_float, cast_integral) => {
2063             let llexpr = datum.to_llscalarish(bcx);
2064             if ty::type_is_signed(t_out) {
2065                 FPToSI(bcx, llexpr, ll_t_out)
2066             } else { FPToUI(bcx, llexpr, ll_t_out) }
2068         (cast_integral, cast_pointer) => {
2069             let llexpr = datum.to_llscalarish(bcx);
2070             IntToPtr(bcx, llexpr, ll_t_out)
2072         (cast_pointer, cast_integral) => {
2073             let llexpr = datum.to_llscalarish(bcx);
2074             PtrToInt(bcx, llexpr, ll_t_out)
2076         (cast_pointer, cast_pointer) => {
2077             let llexpr = datum.to_llscalarish(bcx);
2078             PointerCast(bcx, llexpr, ll_t_out)
// C-like enum casts: read the discriminant from memory, then convert it.
2080         (cast_enum, cast_integral) |
2081         (cast_enum, cast_float) => {
2083             let repr = adt::represent_type(ccx, t_in);
2084             let datum = unpack_datum!(
2085                 bcx, datum.to_lvalue_datum(bcx, "trans_imm_cast", expr.id));
2086             let llexpr_ptr = datum.to_llref();
2088                 adt::trans_get_discr(bcx, &*repr, llexpr_ptr, Some(Type::i64(ccx)));
2090                 cast_integral => int_cast(bcx, ll_t_out,
2091                                           val_ty(lldiscrim_a),
2093                 cast_float => SIToFP(bcx, lldiscrim_a, ll_t_out),
2095                     ccx.sess().bug(&format!("translating unsupported cast: \
2096                                             {} ({:?}) -> {} ({:?})",
2097                                             t_in.repr(bcx.tcx()),
2099                                             t_out.repr(bcx.tcx()),
2104         _ => ccx.sess().bug(&format!("translating unsupported cast: \
2105                                      {} ({:?}) -> {} ({:?})",
2106                                      t_in.repr(bcx.tcx()),
2108                                      t_out.repr(bcx.tcx()),
2111     return immediate_rvalue_bcx(bcx, newval, t_out).to_expr_datumblock();
// Translates a non-overloaded compound assignment (`+=`, `-=`, ...): loads
// the destination, evaluates the RHS, applies the eager binop, and stores
// the result back into the destination lvalue.
// NOTE(review): discontinuous listing — embedded line numbers jump; code
// lines shown are preserved byte-for-byte.
2114 fn trans_assign_op<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2119                                -> Block<'blk, 'tcx> {
2120     let _icx = push_ctxt("trans_assign_op");
2123     debug!("trans_assign_op(expr={})", bcx.expr_to_string(expr));
2125     // User-defined operator methods cannot be used with `+=` etc right now
2126     assert!(!bcx.tcx().method_map.borrow().contains_key(&MethodCall::expr(expr.id)));
2128     // Evaluate LHS (destination), which should be an lvalue
2129     let dst_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, dst, "assign_op"));
// Compound assignment never drops the old value, so the type must not
// need drop glue.
2130     assert!(!type_needs_drop(bcx.tcx(), dst_datum.ty));
2131     let dst_ty = dst_datum.ty;
2132     let dst = load_ty(bcx, dst_datum.val, dst_datum.ty);
2135     let rhs_datum = unpack_datum!(bcx, trans(bcx, &*src));
2136     let rhs_ty = rhs_datum.ty;
2137     let rhs = rhs_datum.to_llscalarish(bcx);
2139     // Perform computation and store the result
2140     let result_datum = unpack_datum!(
2141         bcx, trans_eager_binop(bcx, expr, dst_datum.ty, op,
2142                                dst_ty, dst, rhs_ty, rhs));
2143     return result_datum.store_to(bcx, dst_datum.val);
// Applies an automatic `&`-reference to a datum: forces it to an lvalue
// (scheduling cleanup if necessary) and wraps its address in a by-value
// rvalue datum of reference type.
// NOTE(review): discontinuous listing — embedded line numbers jump.
2146 fn auto_ref<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2147                         datum: Datum<'tcx, Expr>,
2149                         -> DatumBlock<'blk, 'tcx, Expr> {
2152     // Ensure cleanup of `datum` if not already scheduled and obtain
2153     // a "by ref" pointer.
2154     let lv_datum = unpack_datum!(bcx, datum.to_lvalue_datum(bcx, "autoref", expr.id));
2156     // Compute final type. Note that we are loose with the region and
2157     // mutability, since those things don't matter in trans.
2158     let referent_ty = lv_datum.ty;
2159     let ptr_ty = ty::mk_imm_rptr(bcx.tcx(), bcx.tcx().mk_region(ty::ReStatic), referent_ty);
2162     let llref = lv_datum.to_llref();
2164     // Construct the resulting datum, using what was the "by ref"
2165     // ValueRef of type `referent_ty` to be the "by value" ValueRef
2166     // of type `&referent_ty`.
2167     DatumBlock::new(bcx, Datum::new(llref, ptr_ty, RvalueExpr(Rvalue::new(ByValue))))
// Applies a chain of autoderefs by calling deref_once for each level, using
// the per-level MethodCall::autoderef key for overloaded-deref lookup.
// NOTE(review): discontinuous listing — the loop header over the deref count
// is elided (line numbers jump 2176→2178).
2170 fn deref_multiple<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2172                               datum: Datum<'tcx, Expr>,
2174                               -> DatumBlock<'blk, 'tcx, Expr> {
2176     let mut datum = datum;
2178         let method_call = MethodCall::autoderef(expr.id, i);
2179         datum = unpack_datum!(bcx, deref_once(bcx, expr, datum, method_call));
2181     DatumBlock { bcx: bcx, datum: datum }
// Performs one level of dereference on `datum`, handling both overloaded
// `Deref` (via a method call producing `&T`) and built-in pointer types
// (Box, &T, *T), including unsized (fat-pointer) pointees.
// NOTE(review): discontinuous listing — embedded line numbers jump; code
// lines shown are preserved byte-for-byte.
2184 fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2186                           datum: Datum<'tcx, Expr>,
2187                           method_call: MethodCall)
2188                           -> DatumBlock<'blk, 'tcx, Expr> {
2189     let ccx = bcx.ccx();
2191     debug!("deref_once(expr={}, datum={}, method_call={:?})",
2192            expr.repr(bcx.tcx()),
2193            datum.to_string(ccx),
2198     // Check for overloaded deref.
2199     let method_ty = ccx.tcx().method_map.borrow()
2200                        .get(&method_call).map(|method| method.ty);
2201     let datum = match method_ty {
2202         Some(method_ty) => {
2203             let method_ty = monomorphize_type(bcx, method_ty);
2205             // Overloaded. Evaluate `trans_overloaded_op`, which will
2206             // invoke the user's deref() method, which basically
2207             // converts from the `Smaht<T>` pointer that we have into
2208             // a `&T` pointer.  We can then proceed down the normal
2209             // path (below) to dereference that `&T`.
2210             let datum = match method_call.adjustment {
2211                 // Always perform an AutoPtr when applying an overloaded auto-deref
2212                 ty::AutoDeref(_) => unpack_datum!(bcx, auto_ref(bcx, datum, expr)),
2216             let ref_ty = // invoked methods have their LB regions instantiated
2217                 ty::no_late_bound_regions(
2218                     ccx.tcx(), &ty::ty_fn_ret(method_ty)).unwrap().unwrap();
2219             let scratch = rvalue_scratch_datum(bcx, ref_ty, "overloaded_deref");
2221             unpack_result!(bcx, trans_overloaded_op(bcx, expr, method_call,
2222                                                     datum, Vec::new(), Some(SaveIn(scratch.val)),
2224             scratch.to_expr_datum()
2227             // Not overloaded. We already have a pointer we know how to deref.
2232     let r = match datum.ty.sty {
2233         ty::ty_uniq(content_ty) => {
2234             if type_is_sized(bcx.tcx(), content_ty) {
2235                 deref_owned_pointer(bcx, expr, datum, content_ty)
2237                 // A fat pointer and an opened DST value have the same
2238                 // representation just different types. Since there is no
2239                 // temporary for `*e` here (because it is unsized), we cannot
2240                 // emulate the sized object code path for running drop glue and
2241                 // free. Instead, we schedule cleanup for `e`, turning it into
2243                 let datum = unpack_datum!(
2244                     bcx, datum.to_lvalue_datum(bcx, "deref", expr.id));
2246                 let datum = Datum::new(datum.val, ty::mk_open(bcx.tcx(), content_ty), LvalueExpr);
2247                 DatumBlock::new(bcx, datum)
2251         ty::ty_ptr(ty::mt { ty: content_ty, .. }) |
2252         ty::ty_rptr(_, ty::mt { ty: content_ty, .. }) => {
2253             if type_is_sized(bcx.tcx(), content_ty) {
2254                 let ptr = datum.to_llscalarish(bcx);
2256                 // Always generate an lvalue datum, even if datum.mode is
2257                 // an rvalue.  This is because datum.mode is only an
2258                 // rvalue for non-owning pointers like &T or *T, in which
2259                 // case cleanup *is* scheduled elsewhere, by the true
2260                 // owner (or, in the case of *T, by the user).
2261                 DatumBlock::new(bcx, Datum::new(ptr, content_ty, LvalueExpr))
2263                 // A fat pointer and an opened DST value have the same representation
2264                 // just different types.
2265                 DatumBlock::new(bcx, Datum::new(datum.val,
2266                                                 ty::mk_open(bcx.tcx(), content_ty),
// Typeck guarantees only pointer-like types are dereferenced here.
2272             bcx.tcx().sess.span_bug(
2274                 &format!("deref invoked on expr of illegal type {}",
2275                          datum.ty.repr(bcx.tcx()))[]);
2279     debug!("deref_once(expr={}, method_call={:?}, result={})",
2280            expr.id, method_call, r.datum.to_string(ccx));
2284 /// We microoptimize derefs of owned pointers a bit here. Basically, the idea is to make the
2285 /// deref of an rvalue result in an rvalue. This helps to avoid intermediate stack slots in the
2286 /// resulting LLVM. The idea here is that, if the `Box<T>` pointer is an rvalue, then we can
2287 /// schedule a *shallow* free of the `Box<T>` pointer, and then return a ByRef rvalue into the
2288 /// pointer. Because the free is shallow, it is legit to return an rvalue, because we know that
2289 /// the contents are not yet scheduled to be freed. The language rules ensure that the contents
2290 /// will be used (or moved) before the free occurs.
2291 fn deref_owned_pointer<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
2293 datum: Datum<'tcx, Expr>,
2294 content_ty: Ty<'tcx>)
2295 -> DatumBlock<'blk, 'tcx, Expr> {
2297 RvalueExpr(Rvalue { mode: ByRef }) => {
2298 let scope = cleanup::temporary_scope(bcx.tcx(), expr.id);
2299 let ptr = Load(bcx, datum.val);
2300 if !type_is_zero_size(bcx.ccx(), content_ty) {
2301 bcx.fcx.schedule_free_value(scope, ptr, cleanup::HeapExchange, content_ty);
2304 RvalueExpr(Rvalue { mode: ByValue }) => {
2305 let scope = cleanup::temporary_scope(bcx.tcx(), expr.id);
2306 if !type_is_zero_size(bcx.ccx(), content_ty) {
2307 bcx.fcx.schedule_free_value(scope, datum.val, cleanup::HeapExchange,
2314 // If we had an rvalue in, we produce an rvalue out.
2315 let (llptr, kind) = match datum.kind {
2317 (Load(bcx, datum.val), LvalueExpr)
2319 RvalueExpr(Rvalue { mode: ByRef }) => {
2320 (Load(bcx, datum.val), RvalueExpr(Rvalue::new(ByRef)))
2322 RvalueExpr(Rvalue { mode: ByValue }) => {
2323 (datum.val, RvalueExpr(Rvalue::new(ByRef)))
2327 let datum = Datum { ty: content_ty, val: llptr, kind: kind };
2328 DatumBlock { bcx: bcx, datum: datum }