ret cx;
} else if ty::type_is_nil(tcx, t) || ty::type_is_bot(tcx, t) {
ret cx;
- } else if ty::type_is_boxed(tcx, t) {
+ } else if ty::type_is_boxed(tcx, t) || ty::type_is_unique_box(tcx, t) {
if src.is_mem { src_val = Load(cx, src_val); }
if action == DROP_EXISTING { cx = drop_ty(cx, dst, t); }
Store(cx, src_val, dst);
InBoundsGEP(sub.bcx, sub.val,
[C_int(0), C_int(abi::box_rc_field_body)])
}
- ty::ty_uniq(_) { fail "uniq lval translation unimplemented" }
ty::ty_res(_, _, _) {
InBoundsGEP(sub.bcx, sub.val, [C_int(0), C_int(1)])
}
} else { T_typaram_ptr(ccx.tn) };
PointerCast(sub.bcx, sub.val, ellty)
}
- ty::ty_ptr(_) { sub.val }
+ ty::ty_ptr(_) | ty::ty_uniq(_) { sub.val }
};
ret lval_mem(sub.bcx, val);
}
// immediate-ness of the type.
// Whether values of type `t` are "immediate": passed and stored directly
// by value (scalars, @-boxes, native types, and — with this change —
// unique ~ boxes) rather than through a spill slot.
fn type_is_immediate(ccx: @crate_ctxt, t: ty::t) -> bool {
    ret ty::type_is_scalar(ccx.tcx, t) || ty::type_is_boxed(ccx.tcx, t) ||
-        ty::type_is_native(ccx.tcx, t);
+        ty::type_is_unique_box(ccx.tcx, t) || ty::type_is_native(ccx.tcx, t);
}
fn do_spill(cx: @block_ctxt, v: ValueRef, t: ty::t) -> result {
node_id: ast::node_id) -> result {
let bcx = cx;
+ let lv = trans_lval(bcx, contents);
+ bcx = lv.bcx;
+
let contents_ty = ty::expr_ty(bcx_tcx(bcx), contents);
let r = size_of(bcx, contents_ty);
bcx = r.bcx;
bcx = r.bcx;
let llptr = r.val;
- let uniq_ty = node_id_type(bcx_ccx(cx), node_id);
- r = alloc_ty(bcx, uniq_ty);
- let llptrptr = r.val;
- bcx = r.bcx;
- Store(bcx, llptr, llptrptr);
+ bcx = move_val_if_temp(bcx, INIT, llptr, lv, contents_ty);
- r = trans_expr_out(bcx, contents, save_in(llptr));
- add_clean_temp(r.bcx, llptrptr, uniq_ty);
- ret rslt(r.bcx, llptrptr);
+ let uniq_ty = node_id_type(bcx_ccx(cx), node_id);
+ add_clean_temp(r.bcx, llptr, uniq_ty);
+ ret rslt(r.bcx, llptr);
}
fn trans_break_cont(sp: span, cx: @block_ctxt, to_end: bool) -> result {
// this will be more involved. For now, we simply zero out the local, and the
// drop glue checks whether it is zero.
fn revoke_clean(cx: @block_ctxt, val: ValueRef, t: ty::t) -> @block_ctxt {
- if ty::type_is_unique(bcx_tcx(cx), t) {
- // Just zero out the allocation. This ensures that the GC won't try to
- // traverse dangling pointers.
- ret trans::zero_alloca(cx, val, t).bcx;
- }
-
let sc_cx = find_scope_cx(cx);
let found = -1;
let i = 0;
export type_is_bot;
export type_is_box;
export type_is_boxed;
+export type_is_unique_box;
export type_is_vec;
export type_is_fp;
export type_allows_implicit_copy;
}
// Returns true iff the structural form of `ty` is a shared (@) box
// (only the ty_box variant matches).
fn type_is_box(cx: ctxt, ty: t) -> bool {
-    alt struct(cx, ty) { ty_box(_) { ret true; } _ { ret false; } }
+    // Reformatting only: one alt arm per line; behavior unchanged.
+    alt struct(cx, ty) {
+      ty_box(_) { ret true; }
+      _ { ret false; }
+    }
}
// Returns true iff `ty` is a boxed type. As visible here this matches
// exactly the same variant as type_is_box (ty_box only) — unique boxes
// are handled separately by type_is_unique_box.
fn type_is_boxed(cx: ctxt, ty: t) -> bool {
-    alt struct(cx, ty) { ty_box(_) { ret true; } _ { ret false; } }
+    // Reformatting only: one alt arm per line; behavior unchanged.
+    alt struct(cx, ty) {
+      ty_box(_) { ret true; }
+      _ { ret false; }
+    }
+}
+
+// New predicate: returns true iff `ty` is a unique (~) box, i.e. its
+// structural form is the ty_uniq variant. Mirrors the shape of
+// type_is_box/type_is_boxed above.
+fn type_is_unique_box(cx: ctxt, ty: t) -> bool {
+    alt struct(cx, ty) {
+      ty_uniq(_) { ret true; }
+      _ { ret false; }
+    }
}
fn type_is_vec(cx: ctxt, ty: t) -> bool {