//! Detects recursive locks.
use cell::UnsafeCell;
use mem::{self, MaybeUninit};
use sync::atomic::{AtomicUsize, Ordering};
use sys::c;
use sys::compat;
return ret;
}
-pub struct ReentrantMutex { inner: UnsafeCell<c::CRITICAL_SECTION> }
+pub struct ReentrantMutex { inner: MaybeUninit<UnsafeCell<c::CRITICAL_SECTION>> }
unsafe impl Send for ReentrantMutex {}
unsafe impl Sync for ReentrantMutex {}
impl ReentrantMutex {
- pub unsafe fn uninitialized() -> ReentrantMutex {
- mem::uninitialized()
+ pub fn uninitialized() -> ReentrantMutex {
+ MaybeUninit::uninitialized()
}
pub unsafe fn init(&mut self) {
- c::InitializeCriticalSection(self.inner.get());
+ c::InitializeCriticalSection(self.inner.get_ref().get());
}
pub unsafe fn lock(&self) {
- c::EnterCriticalSection(self.inner.get());
+ c::EnterCriticalSection(self.inner.get_ref().get());
}
#[inline]
pub unsafe fn try_lock(&self) -> bool {
- c::TryEnterCriticalSection(self.inner.get()) != 0
+ c::TryEnterCriticalSection(self.inner.get_ref().get()) != 0
}
pub unsafe fn unlock(&self) {
- c::LeaveCriticalSection(self.inner.get());
+ c::LeaveCriticalSection(self.inner.get_ref().get());
}
pub unsafe fn destroy(&self) {
- c::DeleteCriticalSection(self.inner.get());
+ c::DeleteCriticalSection(self.inner.get_ref().get());
}
}