//! The Windows implementation of mutexes is a little odd and it may not be
//! immediately obvious what's going on. The primary oddness is that SRWLock is
//! used instead of CriticalSection, and this is done because:
//!
//! 1. SRWLock is several times faster than CriticalSection according to
//!    benchmarks performed on both Windows 8 and Windows 7.
//!
//! 2. CriticalSection allows recursive locking while SRWLock deadlocks. The
//!    Unix implementation deadlocks, so consistency is preferred. See #19962
//!    for more details.
//!
//! 3. While CriticalSection is fair and SRWLock is not, the current Rust policy
//!    is that there are no guarantees of fairness.
//!
//! The downside of this approach, however, is that SRWLock is not available on
//! Windows XP, so we continue to have a fallback implementation where
//! CriticalSection is used and we keep track of who's holding the mutex to
//! detect recursive locks.
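//!
//! As a quick illustration (a sketch added here, not part of the original
//! notes), both the SRWLock path and the fallback reject a second
//! acquisition of an already-held mutex:
//!
//! ```ignore (illustrative-sketch)
//! let m = Mutex::new();
//! unsafe {
//!     m.lock();
//!     // SRWLock: TryAcquireSRWLockExclusive fails on a lock that is already
//!     // held exclusively; fallback: the `held` flag is already set. Either
//!     // way `try_lock` reports failure rather than locking recursively.
//!     assert!(!m.try_lock());
//!     m.unlock();
//! }
//! ```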
use crate::cell::UnsafeCell;
use crate::mem::{self, MaybeUninit};
use crate::sync::atomic::{AtomicUsize, Ordering};
use crate::sys::c;
use crate::sys::compat;
pub struct Mutex {
    lock: AtomicUsize,
    held: UnsafeCell<bool>,
}
unsafe impl Send for Mutex {}
unsafe impl Sync for Mutex {}
#[derive(Clone, Copy)]
enum Kind {
    SRWLock = 1,
    CriticalSection = 2,
}
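// When the SRWLock implementation is in use, the `lock` word of the `Mutex`
// is the SRWLOCK itself; `raw` hands out its address, and the debug_assert
// below checks that an SRWLOCK actually fits in that storage.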
pub unsafe fn raw(m: &Mutex) -> c::PSRWLOCK {
    debug_assert!(mem::size_of::<c::SRWLOCK>() <= mem::size_of_val(&m.lock));
    &m.lock as *const _ as *mut _
}
impl Mutex {
    pub const fn new() -> Mutex {
        Mutex {
            // This works because SRWLOCK_INIT is 0 (wrapped in a struct), so we are also
            // properly initializing an SRWLOCK here.
            lock: AtomicUsize::new(0),
            held: UnsafeCell::new(false),
        }
    }
    pub unsafe fn init(&mut self) {}
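    // Acquire the lock. On the fallback path, take the reentrant critical
    // section first and then claim the `held` flag; if the flag was already
    // set, this thread is locking recursively, so release the critical
    // section once and panic to surface the bug instead of deadlocking.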
    pub unsafe fn lock(&self) {
        match kind() {
            Kind::SRWLock => c::AcquireSRWLockExclusive(raw(self)),
            Kind::CriticalSection => {
                let re = self.remutex();
                (*re).lock();
                if !self.flag_locked() {
                    (*re).unlock();
                    panic!("cannot recursively lock a mutex");
                }
            }
        }
    }
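    // Nonblocking acquire. On the fallback path the critical section itself
    // can always be re-entered by the owning thread, so success is only
    // reported if the `held` flag could also be claimed; otherwise the
    // critical section is released again and `false` is returned.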
    pub unsafe fn try_lock(&self) -> bool {
        match kind() {
            Kind::SRWLock => c::TryAcquireSRWLockExclusive(raw(self)) != 0,
            Kind::CriticalSection => {
                let re = self.remutex();
                if !(*re).try_lock() {
                    false
                } else if self.flag_locked() {
                    true
                } else {
                    (*re).unlock();
                    false
                }
            }
        }
    }
    pub unsafe fn unlock(&self) {
        *self.held.get() = false;
        match kind() {
            Kind::SRWLock => c::ReleaseSRWLockExclusive(raw(self)),
            Kind::CriticalSection => (*self.remutex()).unlock(),
        }
    }
    pub unsafe fn destroy(&self) {
        match kind() {
            Kind::SRWLock => {}
            Kind::CriticalSection => {
                match self.lock.load(Ordering::SeqCst) {
                    0 => {}
                    n => { Box::from_raw(n as *mut ReentrantMutex).destroy(); }
                }
            }
        }
    }
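    // Lazily allocate the fallback ReentrantMutex, stashing its address in
    // `self.lock`. If another thread wins the compare_and_swap race, the
    // freshly allocated box is destroyed and the winner's pointer is used.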
    unsafe fn remutex(&self) -> *mut ReentrantMutex {
        match self.lock.load(Ordering::SeqCst) {
            0 => {}
            n => return n as *mut _,
        }
        let mut re = box ReentrantMutex::uninitialized();
        re.init();
        let re = Box::into_raw(re);
        match self.lock.compare_and_swap(0, re as usize, Ordering::SeqCst) {
            0 => re,
            n => { Box::from_raw(re).destroy(); n as *mut _ }
        }
    }
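    // Claim the `held` flag, reporting whether this acquisition is the first.
    // This is only called while the critical section is held, so a `true`
    // flag means the owning thread is attempting a recursive lock.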
    unsafe fn flag_locked(&self) -> bool {
        if *self.held.get() {
            false
        } else {
            *self.held.get() = true;
            true
        }
    }
}
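// Decide at runtime which implementation to use: look up
// AcquireSRWLockExclusive in kernel32 (absent on Windows XP) and cache the
// answer in KIND. The Kind discriminants are nonzero so that 0 can mean
// "not yet determined".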
fn kind() -> Kind {
    static KIND: AtomicUsize = AtomicUsize::new(0);

    let val = KIND.load(Ordering::SeqCst);
    if val == Kind::SRWLock as usize {
        return Kind::SRWLock;
    } else if val == Kind::CriticalSection as usize {
        return Kind::CriticalSection;
    }

    let ret = match compat::lookup("kernel32", "AcquireSRWLockExclusive") {
        None => Kind::CriticalSection,
        Some(..) => Kind::SRWLock,
    };
    KIND.store(ret as usize, Ordering::SeqCst);
    ret
}
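// The fallback primitive: a CRITICAL_SECTION kept in MaybeUninit because it
// is only valid once InitializeCriticalSection has run (see `init` below).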
pub struct ReentrantMutex { inner: UnsafeCell<MaybeUninit<c::CRITICAL_SECTION>> }
unsafe impl Send for ReentrantMutex {}
unsafe impl Sync for ReentrantMutex {}
impl ReentrantMutex {
    pub fn uninitialized() -> ReentrantMutex {
        ReentrantMutex { inner: UnsafeCell::new(MaybeUninit::uninit()) }
    }

    pub unsafe fn init(&mut self) {
        c::InitializeCriticalSection((&mut *self.inner.get()).as_mut_ptr());
    }

    pub unsafe fn lock(&self) {
        c::EnterCriticalSection((&mut *self.inner.get()).as_mut_ptr());
    }

    pub unsafe fn try_lock(&self) -> bool {
        c::TryEnterCriticalSection((&mut *self.inner.get()).as_mut_ptr()) != 0
    }

    pub unsafe fn unlock(&self) {
        c::LeaveCriticalSection((&mut *self.inner.get()).as_mut_ptr());
    }

    pub unsafe fn destroy(&self) {
        c::DeleteCriticalSection((&mut *self.inner.get()).as_mut_ptr());
    }
}