let pat_ty = node_id_type(bcx, pat_id);
let llbox = Load(bcx, val);
let unboxed = match ty::get(pat_ty).sty {
- ty::ty_uniq(*) if !ty::type_contents(bcx.tcx(), pat_ty).contains_managed() => llbox,
+ ty::ty_uniq(*) if !ty::type_contents(bcx.tcx(), pat_ty).owns_managed() => llbox,
_ => GEPi(bcx, llbox, [0u, abi::box_field_body])
};
compile_submatch(bcx, enter_uniq(bcx, dm, m, col, val),
let pat_ty = node_id_type(bcx, pat.id);
let llbox = Load(bcx, val);
let unboxed = match ty::get(pat_ty).sty {
- ty::ty_uniq(*) if !ty::type_contents(bcx.tcx(), pat_ty).contains_managed() => llbox,
+ ty::ty_uniq(*) if !ty::type_contents(bcx.tcx(), pat_ty).owns_managed() => llbox,
_ => GEPi(bcx, llbox, [0u, abi::box_field_body])
};
bcx = bind_irrefutable_pat(bcx, inner, unboxed, binding_mode);
}
pub fn heap_for_unique(bcx: @mut Block, t: ty::t) -> heap {
- if ty::type_contents(bcx.tcx(), t).contains_managed() {
+ if ty::type_contents(bcx.tcx(), t).owns_managed() {
heap_managed_unique
} else {
heap_exchange
}
fn heap_for_unique_closure(bcx: @mut Block, t: ty::t) -> heap {
- if ty::type_contents(bcx.tcx(), t).contains_managed() {
+ if ty::type_contents(bcx.tcx(), t).owns_managed() {
heap_managed_unique
} else {
heap_exchange_closure
}
};
- if !header && !ty::type_contents(bcx.tcx(), content_ty).contains_managed() {
+ if !header && !ty::type_contents(bcx.tcx(), content_ty).owns_managed() {
let ptr = self.to_value_llval(bcx);
let ty = type_of::type_of(bcx.ccx(), content_ty);
let body = PointerCast(bcx, ptr, ty.ptr_to());
ty::vstore_fixed(len) => {
fixed_vec_metadata(cx, mt.ty, len, usage_site_span)
}
- ty::vstore_uniq if ty::type_contents(cx.tcx, mt.ty).contains_managed() => {
+ ty::vstore_uniq if ty::type_contents(cx.tcx, mt.ty).owns_managed() => {
let boxed_vec_metadata = boxed_vec_metadata(cx, mt.ty, usage_site_span);
pointer_type_metadata(cx, t, boxed_vec_metadata)
}
}
}
},
- ty::ty_uniq(ref mt) if ty::type_contents(cx.tcx, mt.ty).contains_managed() => {
+ ty::ty_uniq(ref mt) if ty::type_contents(cx.tcx, mt.ty).owns_managed() => {
create_pointer_to_box_metadata(cx, t, mt.ty)
},
ty::ty_uniq(ref mt) |
let has_header = match ty::get(t).sty {
ty::ty_box(*) => true,
- ty::ty_uniq(*) => ty::type_contents(ccx.tcx, t).contains_managed(),
+ ty::ty_uniq(*) => ty::type_contents(ccx.tcx, t).owns_managed(),
_ => false
};
let tp_ty = substs.tys[0];
Ret(bcx, C_bool(ty::type_needs_drop(ccx.tcx, tp_ty)));
}
- "contains_managed" => {
+ "owns_managed" => {
let tp_ty = substs.tys[0];
- Ret(bcx, C_bool(ty::type_contents(ccx.tcx, tp_ty).contains_managed()));
+ Ret(bcx, C_bool(ty::type_contents(ccx.tcx, tp_ty).owns_managed()));
}
"visit_tydesc" => {
let td = get_param(decl, first_real_arg);
ty::ty_evec(ref mt, vst) => {
let (name, extra) = self.vstore_name_and_extra(t, vst);
let extra = extra + self.c_mt(mt);
- if "uniq" == name && ty::type_contents(bcx.tcx(), t).contains_managed() {
+ if "uniq" == name && ty::type_contents(bcx.tcx(), t).owns_managed() {
self.visit("evec_uniq_managed", extra)
} else {
self.visit(~"evec_" + name, extra)
}
ty::ty_uniq(ref mt) => {
let extra = self.c_mt(mt);
- if ty::type_contents(bcx.tcx(), t).contains_managed() {
+ if ty::type_contents(bcx.tcx(), t).owns_managed() {
self.visit("uniq_managed", extra)
} else {
self.visit("uniq", extra)
}
pub fn get_bodyptr(bcx: @mut Block, vptr: ValueRef, t: ty::t) -> ValueRef {
- if ty::type_contents(bcx.tcx(), t).contains_managed() {
+ if ty::type_contents(bcx.tcx(), t).owns_managed() {
GEPi(bcx, vptr, [0u, abi::box_field_body])
} else {
vptr
ty::ty_opaque_box => Type::opaque_box(cx).ptr_to(),
ty::ty_uniq(ref mt) => {
let ty = type_of(cx, mt.ty);
- if ty::type_contents(cx.tcx, mt.ty).contains_managed() {
+ if ty::type_contents(cx.tcx, mt.ty).owns_managed() {
Type::unique(cx, &ty).ptr_to()
} else {
ty.ptr_to()
ty::ty_evec(ref mt, ty::vstore_uniq) => {
let ty = type_of(cx, mt.ty);
let ty = Type::vec(cx.sess.targ_cfg.arch, &ty);
- if ty::type_contents(cx.tcx, mt.ty).contains_managed() {
+ if ty::type_contents(cx.tcx, mt.ty).owns_managed() {
Type::unique(cx, &ty).ptr_to()
} else {
ty.ptr_to()
let body_datum = box_datum.box_body(bcx);
let bcx = glue::drop_ty(bcx, body_datum.to_ref_llval(bcx),
body_datum.ty);
- if ty::type_contents(bcx.tcx(), box_ty).contains_managed() {
+ if ty::type_contents(bcx.tcx(), box_ty).owns_managed() {
glue::trans_free(bcx, box_datum.val)
} else {
glue::trans_exchange_free(bcx, box_datum.val)
!self.intersects(TC::Nonsendable)
}
- pub fn contains_managed(&self) -> bool {
+ pub fn owns_managed(&self) -> bool {
self.intersects(TC::OwnsManaged)
}
ty::mk_nil())
}
"needs_drop" => (1u, ~[], ty::mk_bool()),
- "contains_managed" => (1u, ~[], ty::mk_bool()),
+ "owns_managed" => (1u, ~[], ty::mk_bool()),
"atomic_xchg" | "atomic_xadd" | "atomic_xsub" |
"atomic_xchg_acq" | "atomic_xadd_acq" | "atomic_xsub_acq" |
"atomic_xchg_rel" | "atomic_xadd_rel" | "atomic_xsub_rel" => {
pub fn needs_drop<T>() -> bool;
/// Returns `true` if a type is managed (will be allocated on the local heap)
+ #[cfg(stage0)]
pub fn contains_managed<T>() -> bool;
+ /// Returns `true` if a type owns managed (`@`) boxes (and so will be allocated on the local heap)
+ #[cfg(not(stage0))]
+ pub fn owns_managed<T>() -> bool;
+
pub fn visit_tydesc(td: *TyDesc, tv: &mut TyVisitor);
/// Get the address of the `__morestack` stack growth function.
use uint;
use unstable::finally::Finally;
use unstable::intrinsics;
-use unstable::intrinsics::{get_tydesc, contains_managed};
+use unstable::intrinsics::get_tydesc;
use unstable::raw::{Box, Repr, Slice, Vec};
use vec;
use util;
+#[cfg(not(stage0))]
+use unstable::intrinsics::owns_managed;
+
+#[cfg(stage0)]
+unsafe fn owns_managed<T>() -> bool {
+ intrinsics::contains_managed::<T>()
+}
+
/**
* Creates and initializes an owned vector.
*
#[inline]
pub fn with_capacity<T>(capacity: uint) -> ~[T] {
unsafe {
- if contains_managed::<T>() {
+ if owns_managed::<T>() {
let mut vec = ~[];
vec.reserve(capacity);
vec
if self.capacity() < n {
unsafe {
let td = get_tydesc::<T>();
- if contains_managed::<T>() {
+ if owns_managed::<T>() {
let ptr: *mut *mut Box<Vec<()>> = cast::transmute(self);
::at_vec::raw::reserve_raw(td, ptr, n);
} else {
#[inline]
fn capacity(&self) -> uint {
unsafe {
- if contains_managed::<T>() {
+ if owns_managed::<T>() {
let repr: **Box<Vec<()>> = cast::transmute(self);
(**repr).data.alloc / mem::nonzero_size_of::<T>()
} else {
#[inline]
fn push(&mut self, t: T) {
unsafe {
- if contains_managed::<T>() {
+ if owns_managed::<T>() {
let repr: **Box<Vec<()>> = cast::transmute(&mut *self);
let fill = (**repr).data.fill;
if (**repr).data.alloc <= fill {
// This doesn't bother to make sure we have space.
#[inline] // really pretty please
unsafe fn push_fast<T>(this: &mut ~[T], t: T) {
- if contains_managed::<T>() {
+ if owns_managed::<T>() {
let repr: **mut Box<Vec<u8>> = cast::transmute(this);
let fill = (**repr).data.fill;
(**repr).data.fill += mem::nonzero_size_of::<T>();
use mem;
use unstable::intrinsics;
use vec::{with_capacity, ImmutableVector, MutableVector};
- use unstable::intrinsics::contains_managed;
use unstable::raw::{Box, Vec, Slice};
+ #[cfg(not(stage0))]
+ use unstable::intrinsics::owns_managed;
+
+ #[cfg(stage0)]
+ use vec::owns_managed;
+
/**
* Sets the length of a vector
*
*/
#[inline]
pub unsafe fn set_len<T>(v: &mut ~[T], new_len: uint) {
- if contains_managed::<T>() {
+ if owns_managed::<T>() {
let repr: **mut Box<Vec<()>> = cast::transmute(v);
(**repr).data.fill = new_len * mem::nonzero_size_of::<T>();
} else {