// We only care about the operation here
match *split.get(1) {
- "cxchg" => (1, vec!(ty::mk_imm_rptr(tcx,
- ty::ReLateBound(it.id, ty::BrAnon(0)),
- param(ccx, 0)),
- param(ccx, 0),
- param(ccx, 0)), param(ccx, 0)),
- "load" => (1,
- vec!(
- ty::mk_imm_rptr(tcx, ty::ReLateBound(it.id, ty::BrAnon(0)),
- param(ccx, 0))
- ),
- param(ccx, 0)),
- "store" => (1,
- vec!(
- ty::mk_imm_rptr(tcx, ty::ReLateBound(it.id, ty::BrAnon(0)),
- param(ccx, 0)),
- param(ccx, 0)
- ),
- ty::mk_nil()),
+ "cxchg" => (1, vec!(ty::mk_mut_ptr(tcx, param(ccx, 0)),
+ param(ccx, 0),
+ param(ccx, 0)),
+ param(ccx, 0)),
+ "load" => (1, vec!(ty::mk_imm_ptr(tcx, param(ccx, 0))),
+ param(ccx, 0)),
+ "store" => (1, vec!(ty::mk_mut_ptr(tcx, param(ccx, 0)), param(ccx, 0)),
+ ty::mk_nil()),
"xchg" | "xadd" | "xsub" | "and" | "nand" | "or" | "xor" | "max" |
"min" | "umax" | "umin" => {
- (1, vec!(ty::mk_imm_rptr(tcx,
- ty::ReLateBound(it.id, ty::BrAnon(0)),
- param(ccx, 0)), param(ccx, 0) ),
+ (1, vec!(ty::mk_mut_ptr(tcx, param(ccx, 0)), param(ccx, 0)),
param(ccx, 0))
}
"fence" => {
#[cfg(not(stage0))]
extern "rust-intrinsic" {
- pub fn atomic_cxchg<T>(dst: &T, old: T, src: T) -> T;
- pub fn atomic_cxchg_acq<T>(dst: &T, old: T, src: T) -> T;
- pub fn atomic_cxchg_rel<T>(dst: &T, old: T, src: T) -> T;
- pub fn atomic_cxchg_acqrel<T>(dst: &T, old: T, src: T) -> T;
- pub fn atomic_cxchg_relaxed<T>(dst: &T, old: T, src: T) -> T;
-
- pub fn atomic_load<T>(src: &T) -> T;
- pub fn atomic_load_acq<T>(src: &T) -> T;
- pub fn atomic_load_relaxed<T>(src: &T) -> T;
-
- pub fn atomic_store<T>(dst: &T, val: T);
- pub fn atomic_store_rel<T>(dst: &T, val: T);
- pub fn atomic_store_relaxed<T>(dst: &T, val: T);
-
- pub fn atomic_xchg<T>(dst: &T, src: T) -> T;
- pub fn atomic_xchg_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_xchg_rel<T>(dst: &T, src: T) -> T;
- pub fn atomic_xchg_acqrel<T>(dst: &T, src: T) -> T;
- pub fn atomic_xchg_relaxed<T>(dst: &T, src: T) -> T;
-
- pub fn atomic_xadd<T>(dst: &T, src: T) -> T;
- pub fn atomic_xadd_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_xadd_rel<T>(dst: &T, src: T) -> T;
- pub fn atomic_xadd_acqrel<T>(dst: &T, src: T) -> T;
- pub fn atomic_xadd_relaxed<T>(dst: &T, src: T) -> T;
-
- pub fn atomic_xsub<T>(dst: &T, src: T) -> T;
- pub fn atomic_xsub_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_xsub_rel<T>(dst: &T, src: T) -> T;
- pub fn atomic_xsub_acqrel<T>(dst: &T, src: T) -> T;
- pub fn atomic_xsub_relaxed<T>(dst: &T, src: T) -> T;
-
- pub fn atomic_and<T>(dst: &T, src: T) -> T;
- pub fn atomic_and_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_and_rel<T>(dst: &T, src: T) -> T;
- pub fn atomic_and_acqrel<T>(dst: &T, src: T) -> T;
- pub fn atomic_and_relaxed<T>(dst: &T, src: T) -> T;
-
- pub fn atomic_nand<T>(dst: &T, src: T) -> T;
- pub fn atomic_nand_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_nand_rel<T>(dst: &T, src: T) -> T;
- pub fn atomic_nand_acqrel<T>(dst: &T, src: T) -> T;
- pub fn atomic_nand_relaxed<T>(dst: &T, src: T) -> T;
-
- pub fn atomic_or<T>(dst: &T, src: T) -> T;
- pub fn atomic_or_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_or_rel<T>(dst: &T, src: T) -> T;
- pub fn atomic_or_acqrel<T>(dst: &T, src: T) -> T;
- pub fn atomic_or_relaxed<T>(dst: &T, src: T) -> T;
-
- pub fn atomic_xor<T>(dst: &T, src: T) -> T;
- pub fn atomic_xor_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_xor_rel<T>(dst: &T, src: T) -> T;
- pub fn atomic_xor_acqrel<T>(dst: &T, src: T) -> T;
- pub fn atomic_xor_relaxed<T>(dst: &T, src: T) -> T;
-
- pub fn atomic_max<T>(dst: &T, src: T) -> T;
- pub fn atomic_max_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_max_rel<T>(dst: &T, src: T) -> T;
- pub fn atomic_max_acqrel<T>(dst: &T, src: T) -> T;
- pub fn atomic_max_relaxed<T>(dst: &T, src: T) -> T;
-
- pub fn atomic_min<T>(dst: &T, src: T) -> T;
- pub fn atomic_min_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_min_rel<T>(dst: &T, src: T) -> T;
- pub fn atomic_min_acqrel<T>(dst: &T, src: T) -> T;
- pub fn atomic_min_relaxed<T>(dst: &T, src: T) -> T;
-
- pub fn atomic_umin<T>(dst: &T, src: T) -> T;
- pub fn atomic_umin_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_umin_rel<T>(dst: &T, src: T) -> T;
- pub fn atomic_umin_acqrel<T>(dst: &T, src: T) -> T;
- pub fn atomic_umin_relaxed<T>(dst: &T, src: T) -> T;
-
- pub fn atomic_umax<T>(dst: &T, src: T) -> T;
- pub fn atomic_umax_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_umax_rel<T>(dst: &T, src: T) -> T;
- pub fn atomic_umax_acqrel<T>(dst: &T, src: T) -> T;
- pub fn atomic_umax_relaxed<T>(dst: &T, src: T) -> T;
+ // NB: These intrinsics take raw pointers (`*mut T`) because they mutate
+ // aliased memory, which is not valid through either `&` or `&mut`.
+
+ pub fn atomic_cxchg<T>(dst: *mut T, old: T, src: T) -> T;
+ pub fn atomic_cxchg_acq<T>(dst: *mut T, old: T, src: T) -> T;
+ pub fn atomic_cxchg_rel<T>(dst: *mut T, old: T, src: T) -> T;
+ pub fn atomic_cxchg_acqrel<T>(dst: *mut T, old: T, src: T) -> T;
+ pub fn atomic_cxchg_relaxed<T>(dst: *mut T, old: T, src: T) -> T;
+
+ pub fn atomic_load<T>(src: *T) -> T;
+ pub fn atomic_load_acq<T>(src: *T) -> T;
+ pub fn atomic_load_relaxed<T>(src: *T) -> T;
+
+ pub fn atomic_store<T>(dst: *mut T, val: T);
+ pub fn atomic_store_rel<T>(dst: *mut T, val: T);
+ pub fn atomic_store_relaxed<T>(dst: *mut T, val: T);
+
+ pub fn atomic_xchg<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xchg_acq<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xchg_rel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xchg_acqrel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xchg_relaxed<T>(dst: *mut T, src: T) -> T;
+
+ pub fn atomic_xadd<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xadd_acq<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xadd_rel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xadd_acqrel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xadd_relaxed<T>(dst: *mut T, src: T) -> T;
+
+ pub fn atomic_xsub<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xsub_acq<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xsub_rel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xsub_acqrel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xsub_relaxed<T>(dst: *mut T, src: T) -> T;
+
+ pub fn atomic_and<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_and_acq<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_and_rel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_and_acqrel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_and_relaxed<T>(dst: *mut T, src: T) -> T;
+
+ pub fn atomic_nand<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_nand_acq<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_nand_rel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_nand_acqrel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_nand_relaxed<T>(dst: *mut T, src: T) -> T;
+
+ pub fn atomic_or<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_or_acq<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_or_rel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_or_acqrel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_or_relaxed<T>(dst: *mut T, src: T) -> T;
+
+ pub fn atomic_xor<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xor_acq<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xor_rel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xor_acqrel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xor_relaxed<T>(dst: *mut T, src: T) -> T;
+
+ pub fn atomic_max<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_max_acq<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_max_rel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_max_acqrel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_max_relaxed<T>(dst: *mut T, src: T) -> T;
+
+ pub fn atomic_min<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_min_acq<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_min_rel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_min_acqrel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_min_relaxed<T>(dst: *mut T, src: T) -> T;
+
+ pub fn atomic_umin<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_umin_acq<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_umin_rel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_umin_acqrel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_umin_relaxed<T>(dst: *mut T, src: T) -> T;
+
+ pub fn atomic_umax<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_umax_acq<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_umax_rel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_umax_acqrel<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_umax_relaxed<T>(dst: *mut T, src: T) -> T;
}
extern "rust-intrinsic" {
/// Load the value
#[inline]
pub fn load(&self, order: Ordering) -> bool {
- unsafe { atomic_load(&*self.v.get(), order) > 0 }
+ unsafe { atomic_load(self.v.get() as *uint, order) > 0 }
}
/// Store the value
pub fn store(&self, val: bool, order: Ordering) {
let val = if val { UINT_TRUE } else { 0 };
- unsafe { atomic_store(&mut *self.v.get(), val, order); }
+ unsafe { atomic_store(self.v.get(), val, order); }
}
/// Store a value, returning the old value
pub fn swap(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
- unsafe { atomic_swap(&mut *self.v.get(), val, order) > 0 }
+ unsafe { atomic_swap(self.v.get(), val, order) > 0 }
}
/// If the current value is the same as expected, store a new value
let old = if old { UINT_TRUE } else { 0 };
let new = if new { UINT_TRUE } else { 0 };
- unsafe { atomic_compare_and_swap(&mut *self.v.get(), old, new, order) > 0 }
+ unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) > 0 }
}
/// A logical "and" operation
pub fn fetch_and(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
- unsafe { atomic_and(&mut *self.v.get(), val, order) > 0 }
+ unsafe { atomic_and(self.v.get(), val, order) > 0 }
}
/// A logical "nand" operation
pub fn fetch_nand(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
- unsafe { atomic_nand(&mut *self.v.get(), val, order) > 0 }
+ unsafe { atomic_nand(self.v.get(), val, order) > 0 }
}
/// A logical "or" operation
pub fn fetch_or(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
- unsafe { atomic_or(&mut *self.v.get(), val, order) > 0 }
+ unsafe { atomic_or(self.v.get(), val, order) > 0 }
}
/// A logical "xor" operation
pub fn fetch_xor(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
- unsafe { atomic_xor(&mut *self.v.get(), val, order) > 0 }
+ unsafe { atomic_xor(self.v.get(), val, order) > 0 }
}
}
/// Load the value
#[inline]
pub fn load(&self, order: Ordering) -> int {
- unsafe { atomic_load(&*self.v.get(), order) }
+ unsafe { atomic_load(self.v.get() as *int, order) }
}
/// Store the value
#[inline]
pub fn store(&self, val: int, order: Ordering) {
- unsafe { atomic_store(&mut *self.v.get(), val, order); }
+ unsafe { atomic_store(self.v.get(), val, order); }
}
/// Store a value, returning the old value
#[inline]
pub fn swap(&self, val: int, order: Ordering) -> int {
- unsafe { atomic_swap(&mut *self.v.get(), val, order) }
+ unsafe { atomic_swap(self.v.get(), val, order) }
}
/// If the current value is the same as expected, store a new value
/// If the return value is equal to `old` then the value was updated.
#[inline]
pub fn compare_and_swap(&self, old: int, new: int, order: Ordering) -> int {
- unsafe { atomic_compare_and_swap(&mut *self.v.get(), old, new, order) }
+ unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) }
}
/// Add to the current value, returning the previous
/// ```
#[inline]
pub fn fetch_add(&self, val: int, order: Ordering) -> int {
- unsafe { atomic_add(&mut *self.v.get(), val, order) }
+ unsafe { atomic_add(self.v.get(), val, order) }
}
/// Subtract from the current value, returning the previous
/// ```
#[inline]
pub fn fetch_sub(&self, val: int, order: Ordering) -> int {
- unsafe { atomic_sub(&mut *self.v.get(), val, order) }
+ unsafe { atomic_sub(self.v.get(), val, order) }
}
}
#[inline]
pub fn load(&self, order: Ordering) -> u64 {
- unsafe { atomic_load(&*self.v.get(), order) }
+ unsafe { atomic_load(self.v.get(), order) }
}
#[inline]
pub fn store(&self, val: u64, order: Ordering) {
- unsafe { atomic_store(&mut *self.v.get(), val, order); }
+ unsafe { atomic_store(self.v.get(), val, order); }
}
#[inline]
pub fn swap(&self, val: u64, order: Ordering) -> u64 {
- unsafe { atomic_swap(&mut *self.v.get(), val, order) }
+ unsafe { atomic_swap(self.v.get(), val, order) }
}
#[inline]
pub fn compare_and_swap(&self, old: u64, new: u64, order: Ordering) -> u64 {
- unsafe { atomic_compare_and_swap(&mut *self.v.get(), old, new, order) }
+ unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) }
}
#[inline]
pub fn fetch_add(&self, val: u64, order: Ordering) -> u64 {
- unsafe { atomic_add(&mut *self.v.get(), val, order) }
+ unsafe { atomic_add(self.v.get(), val, order) }
}
#[inline]
pub fn fetch_sub(&self, val: u64, order: Ordering) -> u64 {
- unsafe { atomic_sub(&mut *self.v.get(), val, order) }
+ unsafe { atomic_sub(self.v.get(), val, order) }
}
}
/// Load the value
#[inline]
pub fn load(&self, order: Ordering) -> uint {
- unsafe { atomic_load(&*self.v.get(), order) }
+ unsafe { atomic_load(self.v.get() as *uint, order) }
}
/// Store the value
#[inline]
pub fn store(&self, val: uint, order: Ordering) {
- unsafe { atomic_store(&mut *self.v.get(), val, order); }
+ unsafe { atomic_store(self.v.get(), val, order); }
}
/// Store a value, returning the old value
#[inline]
pub fn swap(&self, val: uint, order: Ordering) -> uint {
- unsafe { atomic_swap(&mut *self.v.get(), val, order) }
+ unsafe { atomic_swap(self.v.get(), val, order) }
}
/// If the current value is the same as expected, store a new value
/// If the return value is equal to `old` then the value was updated.
#[inline]
pub fn compare_and_swap(&self, old: uint, new: uint, order: Ordering) -> uint {
- unsafe { atomic_compare_and_swap(&mut *self.v.get(), old, new, order) }
+ unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) }
}
/// Add to the current value, returning the previous
/// ```
#[inline]
pub fn fetch_add(&self, val: uint, order: Ordering) -> uint {
- unsafe { atomic_add(&mut *self.v.get(), val, order) }
+ unsafe { atomic_add(self.v.get(), val, order) }
}
/// Subtract from the current value, returning the previous
/// ```
#[inline]
pub fn fetch_sub(&self, val: uint, order: Ordering) -> uint {
- unsafe { atomic_sub(&mut *self.v.get(), val, order) }
+ unsafe { atomic_sub(self.v.get(), val, order) }
}
}
#[inline]
pub fn load(&self, order: Ordering) -> *mut T {
unsafe {
- atomic_load(&*self.p.get(), order) as *mut T
+ atomic_load(self.p.get() as **mut T, order) as *mut T
}
}
/// Store the value
#[inline]
pub fn store(&self, ptr: *mut T, order: Ordering) {
- unsafe { atomic_store(&mut *self.p.get(), ptr as uint, order); }
+ unsafe { atomic_store(self.p.get(), ptr as uint, order); }
}
/// Store a value, returning the old value
#[inline]
pub fn swap(&self, ptr: *mut T, order: Ordering) -> *mut T {
- unsafe { atomic_swap(&mut *self.p.get(), ptr as uint, order) as *mut T }
+ unsafe { atomic_swap(self.p.get(), ptr as uint, order) as *mut T }
}
/// If the current value is the same as expected, store a new value
#[inline]
pub fn compare_and_swap(&self, old: *mut T, new: *mut T, order: Ordering) -> *mut T {
unsafe {
- atomic_compare_and_swap(&mut *self.p.get(), old as uint,
+ atomic_compare_and_swap(self.p.get(), old as uint,
new as uint, order) as *mut T
}
}
unsafe {
let val = cast::transmute(val);
- let p = atomic_swap(&mut *self.p.get(), val, order);
+ let p = atomic_swap(self.p.get(), val, order);
if p as uint == 0 {
None
} else {
unsafe {
let val = cast::transmute(val);
let expected = cast::transmute(0);
- let oldval = atomic_compare_and_swap(&mut *self.p.get(), expected, val, order);
+ let oldval = atomic_compare_and_swap(self.p.get(), expected, val, order);
if oldval == expected {
None
} else {
/// result does not get invalidated by another task after this returns.
#[inline]
pub fn is_empty(&self, order: Ordering) -> bool {
- unsafe { atomic_load(&*self.p.get(), order) as uint == 0 }
+ unsafe { atomic_load(self.p.get() as *uint, order) as uint == 0 }
}
}
}
#[inline]
-pub unsafe fn atomic_store<T>(dst: &T, val: T, order:Ordering) {
+pub unsafe fn atomic_store<T>(dst: *mut T, val: T, order:Ordering) {
match order {
Release => intrinsics::atomic_store_rel(dst, val),
Relaxed => intrinsics::atomic_store_relaxed(dst, val),
}
#[inline]
-pub unsafe fn atomic_load<T>(dst: &T, order:Ordering) -> T {
+pub unsafe fn atomic_load<T>(dst: *mut T, order:Ordering) -> T {
match order {
Acquire => intrinsics::atomic_load_acq(dst),
Relaxed => intrinsics::atomic_load_relaxed(dst),
}
#[inline]
-pub unsafe fn atomic_swap<T>(dst: &T, val: T, order: Ordering) -> T {
+pub unsafe fn atomic_swap<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_xchg_acq(dst, val),
Release => intrinsics::atomic_xchg_rel(dst, val),
/// Returns the old value (like __sync_fetch_and_add).
#[inline]
-pub unsafe fn atomic_add<T>(dst: &T, val: T, order: Ordering) -> T {
+pub unsafe fn atomic_add<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_xadd_acq(dst, val),
Release => intrinsics::atomic_xadd_rel(dst, val),
/// Returns the old value (like __sync_fetch_and_sub).
#[inline]
-pub unsafe fn atomic_sub<T>(dst: &T, val: T, order: Ordering) -> T {
+pub unsafe fn atomic_sub<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_xsub_acq(dst, val),
Release => intrinsics::atomic_xsub_rel(dst, val),
}
#[inline]
-pub unsafe fn atomic_compare_and_swap<T>(dst:&T, old:T, new:T, order: Ordering) -> T {
+pub unsafe fn atomic_compare_and_swap<T>(dst: *mut T, old:T, new:T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_cxchg_acq(dst, old, new),
Release => intrinsics::atomic_cxchg_rel(dst, old, new),
}
#[inline]
-pub unsafe fn atomic_and<T>(dst: &T, val: T, order: Ordering) -> T {
+pub unsafe fn atomic_and<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_and_acq(dst, val),
Release => intrinsics::atomic_and_rel(dst, val),
#[inline]
-pub unsafe fn atomic_or<T>(dst: &T, val: T, order: Ordering) -> T {
+pub unsafe fn atomic_or<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_or_acq(dst, val),
Release => intrinsics::atomic_or_rel(dst, val),
#[inline]
-pub unsafe fn atomic_xor<T>(dst: &T, val: T, order: Ordering) -> T {
+pub unsafe fn atomic_xor<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_xor_acq(dst, val),
Release => intrinsics::atomic_xor_rel(dst, val),
fn different_sizes() {
unsafe {
let mut slot = 0u16;
- assert_eq!(super::atomic_swap(&slot, 1, SeqCst), 0);
+ assert_eq!(super::atomic_swap(&mut slot, 1, SeqCst), 0);
let mut slot = 0u8;
- assert_eq!(super::atomic_compare_and_swap(&slot, 1, 2, SeqCst), 0);
+ assert_eq!(super::atomic_compare_and_swap(&mut slot, 1, 2, SeqCst), 0);
let mut slot = 0u32;
assert_eq!(super::atomic_load(&slot, SeqCst), 0);
let mut slot = 0u64;
- super::atomic_store(&slot, 2, SeqCst);
+ super::atomic_store(&mut slot, 2, SeqCst);
}
}
}
pub mod rusti {
extern "rust-intrinsic" {
- pub fn atomic_cxchg<T>(dst: &T, old: T, src: T) -> T;
- pub fn atomic_cxchg_acq<T>(dst: &T, old: T, src: T) -> T;
- pub fn atomic_cxchg_rel<T>(dst: &T, old: T, src: T) -> T;
-
- pub fn atomic_xchg<T>(dst: &T, src: T) -> T;
- pub fn atomic_xchg_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_xchg_rel<T>(dst: &T, src: T) -> T;
-
- pub fn atomic_xadd<T>(dst: &T, src: T) -> T;
- pub fn atomic_xadd_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_xadd_rel<T>(dst: &T, src: T) -> T;
-
- pub fn atomic_xsub<T>(dst: &T, src: T) -> T;
- pub fn atomic_xsub_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_xsub_rel<T>(dst: &T, src: T) -> T;
+ pub fn atomic_xchg<T>(dst: *mut T, src: T) -> T;
}
}
#[inline(always)]
-pub fn atomic_xchg(dst: &mut int, src: int) -> int {
+pub fn atomic_xchg(dst: *mut int, src: int) -> int {
unsafe {
rusti::atomic_xchg(dst, src)
}
mod rusti {
extern "rust-intrinsic" {
- pub fn atomic_cxchg<T>(dst: &T, old: T, src: T) -> T;
- pub fn atomic_cxchg_acq<T>(dst: &T, old: T, src: T) -> T;
- pub fn atomic_cxchg_rel<T>(dst: &T, old: T, src: T) -> T;
+ pub fn atomic_cxchg<T>(dst: *mut T, old: T, src: T) -> T;
+ pub fn atomic_cxchg_acq<T>(dst: *mut T, old: T, src: T) -> T;
+ pub fn atomic_cxchg_rel<T>(dst: *mut T, old: T, src: T) -> T;
- pub fn atomic_load<T>(src: &T) -> T;
- pub fn atomic_load_acq<T>(src: &T) -> T;
+ pub fn atomic_load<T>(src: *T) -> T;
+ pub fn atomic_load_acq<T>(src: *T) -> T;
- pub fn atomic_store<T>(dst: &T, val: T);
- pub fn atomic_store_rel<T>(dst: &T, val: T);
+ pub fn atomic_store<T>(dst: *mut T, val: T);
+ pub fn atomic_store_rel<T>(dst: *mut T, val: T);
- pub fn atomic_xchg<T>(dst: &T, src: T) -> T;
- pub fn atomic_xchg_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_xchg_rel<T>(dst: &T, src: T) -> T;
+ pub fn atomic_xchg<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xchg_acq<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xchg_rel<T>(dst: *mut T, src: T) -> T;
- pub fn atomic_xadd<T>(dst: &T, src: T) -> T;
- pub fn atomic_xadd_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_xadd_rel<T>(dst: &T, src: T) -> T;
+ pub fn atomic_xadd<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xadd_acq<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xadd_rel<T>(dst: *mut T, src: T) -> T;
- pub fn atomic_xsub<T>(dst: &T, src: T) -> T;
- pub fn atomic_xsub_acq<T>(dst: &T, src: T) -> T;
- pub fn atomic_xsub_rel<T>(dst: &T, src: T) -> T;
+ pub fn atomic_xsub<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xsub_acq<T>(dst: *mut T, src: T) -> T;
+ pub fn atomic_xsub_rel<T>(dst: *mut T, src: T) -> T;
}
}
unsafe {
let mut x = ~1;
- assert_eq!(rusti::atomic_load(x), 1);
+ assert_eq!(rusti::atomic_load(&*x), 1);
*x = 5;
- assert_eq!(rusti::atomic_load_acq(x), 5);
+ assert_eq!(rusti::atomic_load_acq(&*x), 5);
- rusti::atomic_store(x,3);
+ rusti::atomic_store(&mut *x,3);
assert_eq!(*x, 3);
- rusti::atomic_store_rel(x,1);
+ rusti::atomic_store_rel(&mut *x,1);
assert_eq!(*x, 1);
- assert_eq!(rusti::atomic_cxchg(x, 1, 2), 1);
+ assert_eq!(rusti::atomic_cxchg(&mut *x, 1, 2), 1);
assert_eq!(*x, 2);
- assert_eq!(rusti::atomic_cxchg_acq(x, 1, 3), 2);
+ assert_eq!(rusti::atomic_cxchg_acq(&mut *x, 1, 3), 2);
assert_eq!(*x, 2);
- assert_eq!(rusti::atomic_cxchg_rel(x, 2, 1), 2);
+ assert_eq!(rusti::atomic_cxchg_rel(&mut *x, 2, 1), 2);
assert_eq!(*x, 1);
- assert_eq!(rusti::atomic_xchg(x, 0), 1);
+ assert_eq!(rusti::atomic_xchg(&mut *x, 0), 1);
assert_eq!(*x, 0);
- assert_eq!(rusti::atomic_xchg_acq(x, 1), 0);
+ assert_eq!(rusti::atomic_xchg_acq(&mut *x, 1), 0);
assert_eq!(*x, 1);
- assert_eq!(rusti::atomic_xchg_rel(x, 0), 1);
+ assert_eq!(rusti::atomic_xchg_rel(&mut *x, 0), 1);
assert_eq!(*x, 0);
- assert_eq!(rusti::atomic_xadd(x, 1), 0);
- assert_eq!(rusti::atomic_xadd_acq(x, 1), 1);
- assert_eq!(rusti::atomic_xadd_rel(x, 1), 2);
+ assert_eq!(rusti::atomic_xadd(&mut *x, 1), 0);
+ assert_eq!(rusti::atomic_xadd_acq(&mut *x, 1), 1);
+ assert_eq!(rusti::atomic_xadd_rel(&mut *x, 1), 2);
assert_eq!(*x, 3);
- assert_eq!(rusti::atomic_xsub(x, 1), 3);
- assert_eq!(rusti::atomic_xsub_acq(x, 1), 2);
- assert_eq!(rusti::atomic_xsub_rel(x, 1), 1);
+ assert_eq!(rusti::atomic_xsub(&mut *x, 1), 3);
+ assert_eq!(rusti::atomic_xsub_acq(&mut *x, 1), 2);
+ assert_eq!(rusti::atomic_xsub_rel(&mut *x, 1), 1);
assert_eq!(*x, 0);
}
}