Commit 82dd54b

Modify CloudABI ReentrantMutex to use MaybeUninit
Remove uses of mem::uninitialized, which is now deprecated
1 parent: e1e0df8

File tree

1 file changed: +18 −14 lines changed

src/libstd/sys/cloudabi/mutex.rs (+18 −14)

@@ -1,5 +1,6 @@
 use crate::cell::UnsafeCell;
 use crate::mem;
+use crate::mem::MaybeUninit;
 use crate::sync::atomic::{AtomicU32, Ordering};
 use crate::sys::cloudabi::abi;
 use crate::sys::rwlock::{self, RWLock};
@@ -47,25 +48,28 @@ impl Mutex {
 }
 
 pub struct ReentrantMutex {
-    lock: UnsafeCell<AtomicU32>,
-    recursion: UnsafeCell<u32>,
+    lock: UnsafeCell<MaybeUninit<AtomicU32>>,
+    recursion: UnsafeCell<MaybeUninit<u32>>,
 }
 
 impl ReentrantMutex {
     pub unsafe fn uninitialized() -> ReentrantMutex {
-        mem::uninitialized()
+        ReentrantMutex {
+            lock: UnsafeCell::new(MaybeUninit::uninit()),
+            recursion: UnsafeCell::new(MaybeUninit::uninit())
+        }
     }
 
     pub unsafe fn init(&mut self) {
-        self.lock = UnsafeCell::new(AtomicU32::new(abi::LOCK_UNLOCKED.0));
-        self.recursion = UnsafeCell::new(0);
+        self.lock = UnsafeCell::new(MaybeUninit::new(AtomicU32::new(abi::LOCK_UNLOCKED.0)));
+        self.recursion = UnsafeCell::new(MaybeUninit::new(0));
     }
 
     pub unsafe fn try_lock(&self) -> bool {
         // Attempt to acquire the lock.
         let lock = self.lock.get();
         let recursion = self.recursion.get();
-        if let Err(old) = (*lock).compare_exchange(
+        if let Err(old) = (*(*lock).as_mut_ptr()).compare_exchange(
             abi::LOCK_UNLOCKED.0,
             __pthread_thread_id.0 | abi::LOCK_WRLOCKED.0,
             Ordering::Acquire,
@@ -74,14 +78,14 @@ impl ReentrantMutex {
             // If we fail to acquire the lock, it may be the case
             // that we've already acquired it and may need to recurse.
             if old & !abi::LOCK_KERNEL_MANAGED.0 == __pthread_thread_id.0 | abi::LOCK_WRLOCKED.0 {
-                *recursion += 1;
+                *(*recursion).as_mut_ptr() += 1;
                 true
             } else {
                 false
             }
         } else {
             // Success.
-            assert_eq!(*recursion, 0, "Mutex has invalid recursion count");
+            assert_eq!(*(*recursion).as_mut_ptr(), 0, "Mutex has invalid recursion count");
             true
         }
     }
@@ -112,14 +116,14 @@ impl ReentrantMutex {
         let lock = self.lock.get();
         let recursion = self.recursion.get();
         assert_eq!(
-            (*lock).load(Ordering::Relaxed) & !abi::LOCK_KERNEL_MANAGED.0,
+            (*(*lock).as_mut_ptr()).load(Ordering::Relaxed) & !abi::LOCK_KERNEL_MANAGED.0,
             __pthread_thread_id.0 | abi::LOCK_WRLOCKED.0,
             "This mutex is locked by a different thread"
         );
 
-        if *recursion > 0 {
-            *recursion -= 1;
-        } else if !(*lock)
+        if *(*recursion).as_mut_ptr() > 0 {
+            *(*recursion).as_mut_ptr() -= 1;
+        } else if !(*(*lock).as_mut_ptr())
             .compare_exchange(
                 __pthread_thread_id.0 | abi::LOCK_WRLOCKED.0,
                 abi::LOCK_UNLOCKED.0,
@@ -139,10 +143,10 @@ impl ReentrantMutex {
         let lock = self.lock.get();
         let recursion = self.recursion.get();
         assert_eq!(
-            (*lock).load(Ordering::Relaxed),
+            (*(*lock).as_mut_ptr()).load(Ordering::Relaxed),
             abi::LOCK_UNLOCKED.0,
             "Attempted to destroy locked mutex"
        );
-        assert_eq!(*recursion, 0, "Recursion counter invalid");
+        assert_eq!(*(*recursion).as_mut_ptr(), 0, "Recursion counter invalid");
     }
 }
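Note: for context, below is a minimal standalone sketch (not part of this commit) of the MaybeUninit pattern the diff adopts: a field is declared as UnsafeCell<MaybeUninit<T>>, left uninitialized at construction, written by an explicit init step, and then read through a raw pointer with as_mut_ptr. The Slot type and its methods are hypothetical names used only for illustration; the real code applies the same idea to the ReentrantMutex lock and recursion fields.

use std::cell::UnsafeCell;
use std::mem::MaybeUninit;

// Hypothetical type illustrating the pattern in the diff above.
struct Slot {
    value: UnsafeCell<MaybeUninit<u32>>,
}

impl Slot {
    // Construct without initializing the inner value,
    // mirroring ReentrantMutex::uninitialized().
    unsafe fn uninitialized() -> Slot {
        Slot { value: UnsafeCell::new(MaybeUninit::uninit()) }
    }

    // Initialize the value in place, mirroring ReentrantMutex::init().
    unsafe fn init(&mut self) {
        self.value = UnsafeCell::new(MaybeUninit::new(0));
    }

    // Read the initialized value through the raw pointer, mirroring the
    // (*(*lock).as_mut_ptr()) accesses in the diff. The caller must
    // guarantee that init() has already run.
    unsafe fn get(&self) -> u32 {
        *(*self.value.get()).as_mut_ptr()
    }
}

fn main() {
    unsafe {
        let mut s = Slot::uninitialized();
        s.init();
        assert_eq!(s.get(), 0);
    }
}

Unlike mem::uninitialized(), which produces an uninitialized value of the final type and is undefined behavior for most types, MaybeUninit keeps the "possibly uninitialized" state visible in the type until the value is actually written.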
