1 //! Thread local storage
3 #![unstable(feature = "thread_local_internals", issue = "none")]
5 use crate::error::Error;
8 /// A thread local storage key which owns its contents.
10 /// This key uses the fastest possible implementation available to it for the
11 /// target platform. It is instantiated with the [`thread_local!`] macro and the
12 /// primary method is the [`with`] method.
14 /// The [`with`] method yields a reference to the contained value which cannot be
15 /// sent across threads or escape the given closure.
17 /// # Initialization and Destruction
19 /// Initialization is dynamically performed on the first call to [`with`]
20 /// within a thread, and values that implement [`Drop`] get destructed when a
21 /// thread exits. Some caveats apply, which are explained below.
23 /// A `LocalKey`'s initializer cannot recursively depend on itself, and using
24 /// a `LocalKey` in this way will cause the initializer to infinitely recurse
25 /// on the first call to `with`.
30 /// use std::cell::RefCell;
33 /// thread_local!(static FOO: RefCell<u32> = RefCell::new(1));
36 /// assert_eq!(*f.borrow(), 1);
37 /// *f.borrow_mut() = 2;
40 /// // each thread starts out with the initial value of 1
/// let t = thread::spawn(move || {
43 /// assert_eq!(*f.borrow(), 1);
44 /// *f.borrow_mut() = 3;
48 /// // wait for the thread to complete and bail out on panic
49 /// t.join().unwrap();
51 /// // we retain our original value of 2 despite the child thread
53 /// assert_eq!(*f.borrow(), 2);
57 /// # Platform-specific behavior
59 /// Note that a "best effort" is made to ensure that destructors for types
60 /// stored in thread local storage are run, but not all platforms can guarantee
61 /// that destructors will be run for all types in thread local storage. For
62 /// example, there are a number of known caveats where destructors are not run:
64 /// 1. On Unix systems when pthread-based TLS is being used, destructors will
65 /// not be run for TLS values on the main thread when it exits. Note that the
66 /// application will exit immediately after the main thread exits as well.
67 /// 2. On all platforms it's possible for TLS to re-initialize other TLS slots
68 /// during destruction. Some platforms ensure that this cannot happen
69 /// infinitely by preventing re-initialization of any slot that has been
70 /// destroyed, but not all platforms have this guard. Those platforms that do
71 /// not guard typically have a synthetic limit after which point no more
72 /// destructors are run.
74 /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
75 /// [`thread_local!`]: ../../std/macro.thread_local.html
76 /// [`Drop`]: ../../std/ops/trait.Drop.html
77 #[stable(feature = "rust1", since = "1.0.0")]
78 pub struct LocalKey<T: 'static> {
79 // This outer `LocalKey<T>` type is what's going to be stored in statics,
80 // but actual data inside will sometimes be tagged with #[thread_local].
81 // It's not valid for a true static to reference a #[thread_local] static,
82 // so we get around that by exposing an accessor through a layer of function
83 // indirection (this thunk).
85 // Note that the thunk is itself unsafe because the returned lifetime of the
86 // slot where data lives, `'static`, is not actually valid. The lifetime
87 // here is actually slightly shorter than the currently running thread!
89 // Although this is an extra layer of indirection, it should in theory be
90 // trivially devirtualizable by LLVM because the value of `inner` never
91 // changes and the constant should be readonly within a crate. This mainly
92 // only runs into problems when TLS statics are exported across crates.
93 inner: unsafe fn() -> Option<&'static T>,
96 #[stable(feature = "std_debug", since = "1.16.0")]
97 impl<T: 'static> fmt::Debug for LocalKey<T> {
98 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
99 f.pad("LocalKey { .. }")
103 /// Declare a new thread local storage key of type [`std::thread::LocalKey`].
107 /// The macro wraps any number of static declarations and makes them thread local.
108 /// Publicity and attributes for each static are allowed. Example:
111 /// use std::cell::RefCell;
113 /// pub static FOO: RefCell<u32> = RefCell::new(1);
116 /// static BAR: RefCell<f32> = RefCell::new(1.0);
121 /// See [LocalKey documentation][`std::thread::LocalKey`] for more
124 /// [`std::thread::LocalKey`]: ../std/thread/struct.LocalKey.html
126 #[stable(feature = "rust1", since = "1.0.0")]
127 #[allow_internal_unstable(thread_local_internals)]
128 macro_rules! thread_local {
129 // empty (base case for the recursion)
132 // process multiple declarations
133 ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr; $($rest:tt)*) => (
134 $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
135 $crate::thread_local!($($rest)*);
138 // handle a single declaration
139 ($(#[$attr:meta])* $vis:vis static $name:ident: $t:ty = $init:expr) => (
140 $crate::__thread_local_inner!($(#[$attr])* $vis $name, $t, $init);
145 #[unstable(feature = "thread_local_internals", reason = "should not be necessary", issue = "none")]
147 #[allow_internal_unstable(thread_local_internals, cfg_target_thread_local, thread_local)]
148 #[allow_internal_unsafe]
149 macro_rules! __thread_local_inner {
150 (@key $t:ty, $init:expr) => {
153 fn __init() -> $t { $init }
155 unsafe fn __getit() -> $crate::option::Option<&'static $t> {
156 #[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
157 static __KEY: $crate::thread::__StaticLocalKeyInner<$t> =
158 $crate::thread::__StaticLocalKeyInner::new();
163 not(all(target_arch = "wasm32", not(target_feature = "atomics"))),
165 static __KEY: $crate::thread::__FastLocalKeyInner<$t> =
166 $crate::thread::__FastLocalKeyInner::new();
169 not(target_thread_local),
170 not(all(target_arch = "wasm32", not(target_feature = "atomics"))),
172 static __KEY: $crate::thread::__OsLocalKeyInner<$t> =
173 $crate::thread::__OsLocalKeyInner::new();
179 $crate::thread::LocalKey::new(__getit)
183 ($(#[$attr:meta])* $vis:vis $name:ident, $t:ty, $init:expr) => {
184 $(#[$attr])* $vis const $name: $crate::thread::LocalKey<$t> =
185 $crate::__thread_local_inner!(@key $t, $init);
189 /// An error returned by [`LocalKey::try_with`](struct.LocalKey.html#method.try_with).
190 #[stable(feature = "thread_local_try_with", since = "1.26.0")]
191 #[derive(Clone, Copy, Eq, PartialEq)]
192 pub struct AccessError {
196 #[stable(feature = "thread_local_try_with", since = "1.26.0")]
197 impl fmt::Debug for AccessError {
198 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
199 f.debug_struct("AccessError").finish()
203 #[stable(feature = "thread_local_try_with", since = "1.26.0")]
204 impl fmt::Display for AccessError {
205 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
206 fmt::Display::fmt("already destroyed", f)
210 #[stable(feature = "thread_local_try_with", since = "1.26.0")]
211 impl Error for AccessError {}
213 impl<T: 'static> LocalKey<T> {
216 feature = "thread_local_internals",
217 reason = "recently added to create a key",
220 pub const unsafe fn new(inner: unsafe fn() -> Option<&'static T>) -> LocalKey<T> {
224 /// Acquires a reference to the value in this TLS key.
226 /// This will lazily initialize the value if this thread has not referenced
231 /// This function will `panic!()` if the key currently has its
232 /// destructor running, and it **may** panic if the destructor has
233 /// previously been run for this thread.
234 #[stable(feature = "rust1", since = "1.0.0")]
235 pub fn with<F, R>(&'static self, f: F) -> R
239 self.try_with(f).expect(
240 "cannot access a Thread Local Storage value \
241 during or after destruction",
245 /// Acquires a reference to the value in this TLS key.
247 /// This will lazily initialize the value if this thread has not referenced
248 /// this key yet. If the key has been destroyed (which may happen if this is called
249 /// in a destructor), this function will return an [`AccessError`](struct.AccessError.html).
253 /// This function will still `panic!()` if the key is uninitialized and the
254 /// key's initializer panics.
255 #[stable(feature = "thread_local_try_with", since = "1.26.0")]
256 pub fn try_with<F, R>(&'static self, f: F) -> Result<R, AccessError>
261 let thread_local = (self.inner)().ok_or(AccessError { _private: () })?;
268 use crate::cell::UnsafeCell;
272 pub struct LazyKeyInner<T> {
273 inner: UnsafeCell<Option<T>>,
276 impl<T> LazyKeyInner<T> {
277 pub const fn new() -> LazyKeyInner<T> {
278 LazyKeyInner { inner: UnsafeCell::new(None) }
281 pub unsafe fn get(&self) -> Option<&'static T> {
282 (*self.inner.get()).as_ref()
285 pub unsafe fn initialize<F: FnOnce() -> T>(&self, init: F) -> &'static T {
286 // Execute the initialization up front, *then* move it into our slot,
287 // just in case initialization fails.
289 let ptr = self.inner.get();
291 // note that this can in theory just be `*ptr = Some(value)`, but due to
292 // the compiler will currently codegen that pattern with something like:
294 // ptr::drop_in_place(ptr)
295 // ptr::write(ptr, Some(value))
297 // Due to this pattern it's possible for the destructor of the value in
298 // `ptr` (e.g., if this is being recursively initialized) to re-access
299 // TLS, in which case there will be a `&` and `&mut` pointer to the same
300 // value (an aliasing violation). To avoid setting the "I'm running a
301 // destructor" flag we just use `mem::replace` which should sequence the
302 // operations a little differently and make this safe to call.
303 mem::replace(&mut *ptr, Some(value));
305 // After storing `Some` we want to get a reference to the contents of
306 // what we just stored. While we could use `unwrap` here and it should
307 // always work it empirically doesn't seem to always get optimized away,
308 // which means that using something like `try_with` can pull in
309 // panicking code and cause a large size bloat.
312 None => hint::unreachable_unchecked(),
317 pub unsafe fn take(&mut self) -> Option<T> {
318 (*self.inner.get()).take()
/// On some platforms like wasm32 there are no threads, so no need to generate
324 /// thread locals and we can instead just use plain statics!
326 #[cfg(all(target_arch = "wasm32", not(target_feature = "atomics")))]
328 use super::lazy::LazyKeyInner;
332 inner: LazyKeyInner<T>,
335 unsafe impl<T> Sync for Key<T> {}
337 impl<T> fmt::Debug for Key<T> {
338 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
344 pub const fn new() -> Key<T> {
345 Key { inner: LazyKeyInner::new() }
348 pub unsafe fn get(&self, init: fn() -> T) -> Option<&'static T> {
349 let value = match self.inner.get() {
350 Some(ref value) => value,
351 None => self.inner.initialize(init),
359 #[cfg(target_thread_local)]
361 use super::lazy::LazyKeyInner;
362 use crate::cell::Cell;
365 use crate::sys::fast_thread_local::register_dtor;
367 #[derive(Copy, Clone)]
374 // This data structure has been carefully constructed so that the fast path
375 // only contains one branch on x86. That optimization is necessary to avoid
376 // duplicated tls lookups on OSX.
378 // LLVM issue: https://bugs.llvm.org/show_bug.cgi?id=41722
380 // If `LazyKeyInner::get` returns `None`, that indicates either:
381 // * The value has never been initialized
382 // * The value is being recursively initialized
383 // * The value has already been destroyed or is being destroyed
384 // To determine which kind of `None`, check `dtor_state`.
386 // This is very optimizer friendly for the fast path - initialized but
388 inner: LazyKeyInner<T>,
390 // Metadata to keep track of the state of the destructor. Remember that
391 // this variable is thread-local, not global.
392 dtor_state: Cell<DtorState>,
395 impl<T> fmt::Debug for Key<T> {
396 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
402 pub const fn new() -> Key<T> {
403 Key { inner: LazyKeyInner::new(), dtor_state: Cell::new(DtorState::Unregistered) }
406 pub unsafe fn get<F: FnOnce() -> T>(&self, init: F) -> Option<&'static T> {
407 match self.inner.get() {
408 Some(val) => Some(val),
409 None => self.try_initialize(init),
413 // `try_initialize` is only called once per fast thread local variable,
414 // except in corner cases where thread_local dtors reference other
415 // thread_local's, or it is being recursively initialized.
417 // Macos: Inlining this function can cause two `tlv_get_addr` calls to
418 // be performed for every call to `Key::get`. The #[cold] hint makes
420 // LLVM issue: https://bugs.llvm.org/show_bug.cgi?id=41722
422 unsafe fn try_initialize<F: FnOnce() -> T>(&self, init: F) -> Option<&'static T> {
423 if !mem::needs_drop::<T>() || self.try_register_dtor() {
424 Some(self.inner.initialize(init))
430 // `try_register_dtor` is only called once per fast thread local
431 // variable, except in corner cases where thread_local dtors reference
432 // other thread_local's, or it is being recursively initialized.
433 unsafe fn try_register_dtor(&self) -> bool {
434 match self.dtor_state.get() {
435 DtorState::Unregistered => {
436 // dtor registration happens before initialization.
437 register_dtor(self as *const _ as *mut u8, destroy_value::<T>);
438 self.dtor_state.set(DtorState::Registered);
441 DtorState::Registered => {
442 // recursively initialized
445 DtorState::RunningOrHasRun => false,
450 unsafe extern "C" fn destroy_value<T>(ptr: *mut u8) {
451 let ptr = ptr as *mut Key<T>;
453 // Right before we run the user destructor be sure to set the
454 // `Option<T>` to `None`, and `dtor_state` to `RunningOrHasRun`. This
455 // causes future calls to `get` to run `try_initialize_drop` again,
456 // which will now fail, and return `None`.
457 let value = (*ptr).inner.take();
458 (*ptr).dtor_state.set(DtorState::RunningOrHasRun);
465 use super::lazy::LazyKeyInner;
466 use crate::cell::Cell;
470 use crate::sys_common::thread_local::StaticKey as OsStaticKey;
473 // OS-TLS key that we'll use to key off.
475 marker: marker::PhantomData<Cell<T>>,
478 impl<T> fmt::Debug for Key<T> {
479 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
484 unsafe impl<T> Sync for Key<T> {}
486 struct Value<T: 'static> {
487 inner: LazyKeyInner<T>,
488 key: &'static Key<T>,
491 impl<T: 'static> Key<T> {
492 pub const fn new() -> Key<T> {
493 Key { os: OsStaticKey::new(Some(destroy_value::<T>)), marker: marker::PhantomData }
496 pub unsafe fn get(&'static self, init: fn() -> T) -> Option<&'static T> {
497 let ptr = self.os.get() as *mut Value<T>;
498 if ptr as usize > 1 {
499 if let Some(ref value) = (*ptr).inner.get() {
503 self.try_initialize(init)
506 // `try_initialize` is only called once per os thread local variable,
507 // except in corner cases where thread_local dtors reference other
508 // thread_local's, or it is being recursively initialized.
509 unsafe fn try_initialize(&'static self, init: fn() -> T) -> Option<&'static T> {
510 let ptr = self.os.get() as *mut Value<T>;
511 if ptr as usize == 1 {
512 // destructor is running
516 let ptr = if ptr.is_null() {
517 // If the lookup returned null, we haven't initialized our own
518 // local copy, so do that now.
519 let ptr: Box<Value<T>> = box Value { inner: LazyKeyInner::new(), key: self };
520 let ptr = Box::into_raw(ptr);
521 self.os.set(ptr as *mut u8);
524 // recursive initialization
528 Some((*ptr).inner.initialize(init))
532 unsafe extern "C" fn destroy_value<T: 'static>(ptr: *mut u8) {
533 // The OS TLS ensures that this key contains a NULL value when this
534 // destructor starts to run. We set it back to a sentinel value of 1 to
535 // ensure that any future calls to `get` for this thread will return
538 // Note that to prevent an infinite loop we reset it back to null right
539 // before we return from the destructor ourselves.
540 let ptr = Box::from_raw(ptr as *mut Value<T>);
542 key.os.set(1 as *mut u8);
544 key.os.set(ptr::null_mut());
548 #[cfg(all(test, not(target_os = "emscripten")))]
550 use crate::cell::{Cell, UnsafeCell};
551 use crate::sync::mpsc::{channel, Sender};
554 struct Foo(Sender<()>);
558 let Foo(ref s) = *self;
565 thread_local!(static FOO: Cell<i32> = Cell::new(1));
568 assert_eq!(f.get(), 1);
571 let (tx, rx) = channel();
572 let _t = thread::spawn(move || {
574 assert_eq!(f.get(), 1);
576 tx.send(()).unwrap();
581 assert_eq!(f.get(), 2);
590 assert!(FOO.try_with(|_| ()).is_err());
593 thread_local!(static FOO: Foo = Foo);
596 assert!(FOO.try_with(|_| ()).is_ok());
600 .expect("thread panicked");
605 thread_local!(static FOO: UnsafeCell<Option<Foo>> = UnsafeCell::new(None));
607 let (tx, rx) = channel();
608 let _t = thread::spawn(move || unsafe {
609 let mut tx = Some(tx);
611 *f.get() = Some(Foo(tx.take().unwrap()));
621 thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));
622 thread_local!(static K2: UnsafeCell<Option<S2>> = UnsafeCell::new(None));
623 static mut HITS: u32 = 0;
629 if K2.try_with(|_| ()).is_err() {
633 K2.with(|s| *s.get() = Some(S2));
645 assert!(K1.try_with(|_| ()).is_ok());
647 K1.with(|s| *s.get() = Some(S1));
652 thread::spawn(move || {
657 .expect("thread panicked");
661 fn self_referential() {
663 thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));
667 assert!(K1.try_with(|_| ()).is_err());
671 thread::spawn(move || unsafe {
672 K1.with(|s| *s.get() = Some(S1));
676 .expect("thread panicked");
679 // Note that this test will deadlock if TLS destructors aren't run (this
680 // requires the destructor to be run to pass the test).
682 fn dtors_in_dtors_in_dtors() {
683 struct S1(Sender<()>);
684 thread_local!(static K1: UnsafeCell<Option<S1>> = UnsafeCell::new(None));
685 thread_local!(static K2: UnsafeCell<Option<Foo>> = UnsafeCell::new(None));
689 let S1(ref tx) = *self;
691 let _ = K2.try_with(|s| *s.get() = Some(Foo(tx.clone())));
696 let (tx, rx) = channel();
697 let _t = thread::spawn(move || unsafe {
698 let mut tx = Some(tx);
699 K1.with(|s| *s.get() = Some(S1(tx.take().unwrap())));
707 use crate::cell::RefCell;
708 use crate::collections::HashMap;
712 fn square(i: i32) -> i32 {
715 thread_local!(static FOO: i32 = square(3));
724 fn map() -> RefCell<HashMap<i32, i32>> {
725 let mut m = HashMap::new();
729 thread_local!(static FOO: RefCell<HashMap<i32, i32>> = map());
732 assert_eq!(map.borrow()[&1], 2);
738 thread_local!(static FOO: RefCell<Vec<u32>> = RefCell::new(vec![1, 2, 3]));
741 assert_eq!(vec.borrow().len(), 3);
742 vec.borrow_mut().push(4);
743 assert_eq!(vec.borrow()[3], 4);