1 use sync::atomic::{AtomicUsize, Ordering};
6 use self::sync_bitset::*;
// Width of `usize` in bits, selected by target pointer width.
// NOTE(review): only the 64-bit arm is visible in this listing — a matching
// 32-bit `cfg` arm presumably exists on the elided lines; confirm.
8 #[cfg(target_pointer_width="64")]
9 const USIZE_BITS: usize = 64;
// Maximum number of TLS keys supported.
10 const TLS_KEYS: usize = 128; // Same as POSIX minimum
// Number of `usize` words needed to hold one bit per TLS key
// (ceiling division of TLS_KEYS by USIZE_BITS).
11 const TLS_KEYS_BITSET_SIZE: usize = (TLS_KEYS + (USIZE_BITS - 1)) / USIZE_BITS;
// Global bitset tracking which TLS keys are currently allocated;
// a set bit at index i means key i is in use.
13 static TLS_KEY_IN_USE: SyncBitset = SYNC_BITSET_INIT;
// Arms of the `dup!` macro (the `macro_rules!` header and closing brace are on
// elided lines). Each `*` consumed from the first group doubles the
// accumulated token list, so `dup!((* * * * * * *) (expr))` expands to an
// array literal of 2^7 = 128 copies of `expr` — matching TLS_KEYS.
15 ((* $($exp:tt)*) $($val:tt)*) => (dup!( ($($exp)*) $($val)* $($val)* ));
// Base case: no stars left — emit the accumulated values as an array literal.
16 (() $($val:tt)*) => ([$($val),*])
// Per-key destructor function pointers, stored as raw `usize` values
// (0 = no destructor registered). Written out as 128 literal elements —
// presumably because the array could not be built with a const expression
// at the time this was written (pre const-generics/inline-const); the `dup!`
// macro cannot be used here since `AtomicUsize::new(0)` must appear
// syntactically per element in a `static` initializer. TODO confirm.
// NOTE(review): the closing `];` is on an elided line.
18 static TLS_DESTRUCTOR: [AtomicUsize; TLS_KEYS] = [
19 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
20 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
21 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
22 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
23 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
24 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
25 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
26 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
27 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
28 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
29 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
30 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
31 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
32 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
33 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
34 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
35 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
36 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
37 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
38 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
39 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
40 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
41 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
42 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
43 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
44 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
45 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
46 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
47 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
48 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
49 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
50 AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
// Accessors for the current thread's TLS base pointer. The enclosing item
// (presumably an `extern` block or `#[cfg]`-gated shim module supplying these
// per-platform — TODO confirm from the elided lines) is not visible here.
// Read back the pointer most recently passed to `set_tls_ptr`.
54 fn get_tls_ptr() -> *const u8;
// Install `tls` as the current thread's TLS base pointer.
55 fn set_tls_ptr(tls: *const u8);
// Opaque handle to an allocated TLS key. Wrapping `NonZeroUsize` gives
// `Option<Key>` a niche, so it is the same size as `Key` itself; the stored
// value is the key index plus one (see `from_index` below).
58 #[derive(Copy, Clone)]
60 pub struct Key(NonZeroUsize);
// Convert the 1-based stored value back to a 0-based index into
// TLS_KEY_IN_USE / TLS_DESTRUCTOR. Body is on elided lines — presumably
// `self.0.get() - 1`, the inverse of `from_index`; confirm.
63 fn to_index(self) -> usize {
// Build a Key from a 0-based index. `index + 1` is always nonzero
// (barring usize overflow), so the `unwrap` cannot fail for valid indices.
67 fn from_index(index: usize) -> Self {
68 Key(NonZeroUsize::new(index + 1).unwrap())
// Expose the raw (1-based) stored value; body elided — presumably
// `self.0.get()`. Round-trips with `from_usize` below.
71 pub fn as_usize(self) -> usize {
// Rebuild a Key from a value previously returned by `as_usize`.
// Panics if `index` is 0, i.e. if the value did not come from `as_usize`.
75 pub fn from_usize(index: usize) -> Self {
76 Key(NonZeroUsize::new(index).unwrap())
// Per-thread TLS storage: one interior-mutable slot per key. `Cell` is fine
// because a `Tls` is only ever accessed from its owning thread.
// (The enclosing `struct Tls` header is on an elided line.)
82 data: [Cell<*mut u8>; TLS_KEYS]
// Guard representing a `Tls` that is currently installed as the active
// thread-local block; its field(s) and closing brace are on elided lines.
// Dropping it runs the registered destructors (see the Drop impl below).
85 pub struct ActiveTls<'a> {
// On deactivation, run the destructor of every in-use key whose slot holds a
// non-null value. The `fn drop` header and closing braces are on elided lines.
89 impl<'a> Drop for ActiveTls<'a> {
// Map a key index to `(slot, dtor)` if a destructor is registered;
// a stored 0 transmutes to `None` (fn pointers are non-null), which
// `.map` then filters out.
91 let value_with_destructor = |key: usize| {
92 let ptr = TLS_DESTRUCTOR[key].load(Ordering::Relaxed);
93 unsafe { mem::transmute::<_,Option<unsafe extern fn(*mut u8)>>(ptr) }
94 .map(|dtor| (&self.tls.data[key], dtor))
// Loop until a full pass runs no destructor: a destructor may itself
// store new values into other slots, which must also be destroyed
// (same re-run rule POSIX applies to pthread key destructors).
97 let mut any_non_null_dtor = true;
98 while any_non_null_dtor {
99 any_non_null_dtor = false;
100 for (value, dtor) in TLS_KEY_IN_USE.iter().filter_map(&value_with_destructor) {
// Take the value out first so a re-entrant destructor sees null.
101 let value = value.replace(ptr::null_mut());
102 if value != ptr::null_mut() {
103 any_non_null_dtor = true;
104 unsafe { dtor(value) }
// Methods of `impl Tls`; the impl header and most closing braces are on
// elided lines.
// Create a fresh TLS block with every slot null. `dup!` with 7 stars
// expands to 2^7 = 128 `Cell::new(ptr::null_mut())` elements (= TLS_KEYS).
112 pub fn new() -> Tls {
113 Tls { data: dup!((* * * * * * *) (Cell::new(ptr::null_mut()))) }
// Install `self` as the current thread's TLS block. Unsafe: the caller must
// keep `self` alive and installed for as long as the returned guard (and any
// TLS access through `current`) exists.
116 pub unsafe fn activate(&self) -> ActiveTls {
117 set_tls_ptr(self as *const Tls as _);
118 ActiveTls { tls: self }
// Install a heap-allocated TLS block permanently (no guard, so destructors
// are never run for it). The elided tail presumably forgets/leaks the Box so
// the pointer stays valid for the thread's lifetime — TODO confirm.
122 pub unsafe fn activate_persistent(self: Box<Self>) {
123 set_tls_ptr((&*self) as *const Tls as _);
// Dereference the installed TLS pointer. Unsafe: requires that `activate`
// or `activate_persistent` was previously called on this thread and the
// block is still alive; the lifetime `'a` is chosen by the caller.
127 unsafe fn current<'a>() -> &'a Tls {
128 &*(get_tls_ptr() as *const Tls)
// Allocate a key, optionally registering a destructor (stored as a raw
// usize, 0 meaning "none"). Panics if all TLS_KEYS keys are taken.
131 pub fn create(dtor: Option<unsafe extern fn(*mut u8)>) -> Key {
132 let index = TLS_KEY_IN_USE.set().expect("TLS limit exceeded");
133 TLS_DESTRUCTOR[index].store(dtor.map_or(0, |f| f as usize), Ordering::Relaxed);
134 Key::from_index(index)
// Store `value` in this thread's slot for `key`. Asserts the key is live.
137 pub fn set(key: Key, value: *mut u8) {
138 let index = key.to_index();
139 assert!(TLS_KEY_IN_USE.get(index));
140 unsafe { Self::current() }.data[index].set(value);
// Read this thread's slot for `key`. Asserts the key is live.
143 pub fn get(key: Key) -> *mut u8 {
144 let index = key.to_index();
145 assert!(TLS_KEY_IN_USE.get(index));
146 unsafe { Self::current() }.data[index].get()
// Release a key for reuse. Note: per-thread slot values and the destructor
// entry are not cleared here; `create` overwrites the destructor slot.
149 pub fn destroy(key: Key) {
150 TLS_KEY_IN_USE.clear(key.to_index());
155 use sync::atomic::{AtomicUsize, Ordering};
156 use iter::{Enumerate, Peekable};
158 use super::{TLS_KEYS_BITSET_SIZE, USIZE_BITS};
160 /// A bitset that can be used synchronously.
// Fixed-size bitset over atomic words; one bit per TLS key.
161 pub(super) struct SyncBitset([AtomicUsize; TLS_KEYS_BITSET_SIZE]);
// All-zero initializer. NOTE(review): this hardcodes exactly two words and is
// only correct while TLS_KEYS_BITSET_SIZE == 2 (128 keys / 64-bit words); if
// either constant changes — or on a 32-bit target, where 128/32 = 4 — this
// will no longer type-check. Consider deriving it from the constant.
163 pub(super) const SYNC_BITSET_INIT: SyncBitset =
164 SyncBitset([AtomicUsize::new(0), AtomicUsize::new(0)]);
// Methods of `impl SyncBitset`; impl header, some bodies, and closing braces
// are on elided lines.
// Test bit `index`.
167 pub fn get(&self, index: usize) -> bool {
168 let (hi, lo) = Self::split(index);
169 (self.0[hi].load(Ordering::Relaxed) & lo) != 0
// Iterate over the indices of currently-set bits (snapshot per word).
173 pub fn iter(&self) -> SyncBitsetIter {
175 iter: self.0.iter().enumerate().peekable(),
// Clear bit `index` atomically (AND with the bit's complement).
180 pub fn clear(&self, index: usize) {
181 let (hi, lo) = Self::split(index);
182 self.0[hi].fetch_and(!lo, Ordering::Relaxed);
185 /// Sets any unset bit. Not atomic. Returns `None` if all bits were
186 /// observed to be set.
187 pub fn set(&self) -> Option<usize> {
// Per-word CAS loop; the elided lines 190-193 presumably skip/retry a
// fully-set word (`!current == 0`) via the `'elems` label — confirm.
188 'elems: for (idx, elem) in self.0.iter().enumerate() {
189 let mut current = elem.load(Ordering::Relaxed);
// Lowest zero bit of `current` = trailing ones count, found as the
// trailing zeros of its complement.
194 let trailing_ones = (!current).trailing_zeros() as usize;
// Try to claim that bit; on contention, retry with the observed value.
195 match elem.compare_exchange(
197 current | (1 << trailing_ones),
201 Ok(_) => return Some(idx * USIZE_BITS + trailing_ones),
202 Err(previous) => current = previous,
// Decompose a bit index into (word index, single-bit mask within the word).
209 fn split(index: usize) -> (usize, usize) {
210 (index / USIZE_BITS, 1 << (index % USIZE_BITS))
// Iterator over the indices of set bits in a SyncBitset. Tracks the current
// word via a peekable enumerate and the next bit position within that word
// (`elem_idx`, declared on an elided line).
214 pub(super) struct SyncBitsetIter<'a> {
215 iter: Peekable<Enumerate<Iter<'a, AtomicUsize>>>,
219 impl<'a> Iterator for SyncBitsetIter<'a> {
222 fn next(&mut self) -> Option<usize> {
223 self.iter.peek().cloned().and_then(|(idx, elem)| {
// Re-load the word each call: bits may change between calls.
224 let elem = elem.load(Ordering::Relaxed);
// Mask off bits below `elem_idx` (already yielded), then find the
// next set bit; trailing_zeros == word width when none remain.
225 let low_mask = (1 << self.elem_idx) - 1;
226 let next = elem & !low_mask;
227 let next_idx = next.trailing_zeros() as usize;
228 self.elem_idx = next_idx + 1;
// NOTE(review): `64` here is hardcoded where USIZE_BITS is used
// everywhere else — inconsistent, and wrong if a 32-bit USIZE_BITS
// arm exists. The elided lines presumably advance to the next word
// and recurse; the `match next_idx` arm for the exhausted case
// (value 64) is also elided. Consider using USIZE_BITS.
229 if self.elem_idx >= 64 {
235 _ => Some(idx * USIZE_BITS + next_idx),
// Test helper: build a two-word bitset from raw values and check that
// iteration yields exactly `bit_indices` (and, on elided lines, presumably
// that `get` agrees for each index).
245 fn test_data(bitset: [usize; 2], bit_indices: &[usize]) {
246 let set = SyncBitset([AtomicUsize::new(bitset[0]), AtomicUsize::new(bitset[1])]);
247 assert_eq!(set.iter().collect::<Vec<_>>(), bit_indices);
248 for &i in bit_indices {
// Cases: bits within one word (0b0110_1001 -> bits 0,3,5,6), bits straddling
// the word boundary (63, 64, 127), and the empty set. These patterns assume
// 64-bit words. (Enclosing #[test] fn headers are on elided lines.)
255 test_data([0b0110_1001, 0], &[0, 3, 5, 6]);
256 test_data([0x8000_0000_0000_0000, 0x8000_0000_0000_0001], &[63, 64, 127]);
257 test_data([0, 0], &[]);
// Allocate one bit via `set`, verify it reads back set, then (after an elided
// `clear` call, presumably) verify it reads back clear.
262 let set = SYNC_BITSET_INIT;
263 let key = set.set().unwrap();
264 assert!(set.get(key));
266 assert!(!set.get(key));