summaryrefslogtreecommitdiff
path: root/src/collection
diff options
context:
space:
mode:
Diffstat (limited to 'src/collection')
-rw-r--r--src/collection/boxed.rs170
-rw-r--r--src/collection/guard.rs83
-rw-r--r--src/collection/owned.rs262
-rw-r--r--src/collection/ref.rs261
-rw-r--r--src/collection/retry.rs176
-rw-r--r--src/collection/utils.rs40
6 files changed, 702 insertions, 290 deletions
diff --git a/src/collection/boxed.rs b/src/collection/boxed.rs
index 0597e90..364ec97 100644
--- a/src/collection/boxed.rs
+++ b/src/collection/boxed.rs
@@ -1,25 +1,12 @@
use std::cell::UnsafeCell;
use std::fmt::Debug;
-use std::marker::PhantomData;
use crate::lockable::{Lockable, LockableIntoInner, OwnedLockable, RawLock, Sharable};
-use crate::Keyable;
+use crate::{Keyable, ThreadKey};
+use super::utils::ordered_contains_duplicates;
use super::{utils, BoxedLockCollection, LockGuard};
-/// returns `true` if the sorted list contains a duplicate
-#[must_use]
-fn contains_duplicates(l: &[&dyn RawLock]) -> bool {
- if l.is_empty() {
- // Return early to prevent panic in the below call to `windows`
- return false;
- }
-
- l.windows(2)
- // NOTE: addr_eq is necessary because eq would also compare the v-table pointers
- .any(|window| std::ptr::addr_eq(window[0], window[1]))
-}
-
unsafe impl<L: Lockable> RawLock for BoxedLockCollection<L> {
#[mutants::skip] // this should never be called
#[cfg(not(tarpaulin_include))]
@@ -65,6 +52,11 @@ unsafe impl<L: Lockable> Lockable for BoxedLockCollection<L> {
where
Self: 'g;
+ type DataMut<'a>
+ = L::DataMut<'a>
+ where
+ Self: 'a;
+
fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
ptrs.extend(self.locks())
}
@@ -72,6 +64,10 @@ unsafe impl<L: Lockable> Lockable for BoxedLockCollection<L> {
unsafe fn guard(&self) -> Self::Guard<'_> {
self.child().guard()
}
+
+ unsafe fn data_mut(&self) -> Self::DataMut<'_> {
+ self.child().data_mut()
+ }
}
unsafe impl<L: Sharable> Sharable for BoxedLockCollection<L> {
@@ -80,9 +76,18 @@ unsafe impl<L: Sharable> Sharable for BoxedLockCollection<L> {
where
Self: 'g;
+ type DataRef<'a>
+ = L::DataRef<'a>
+ where
+ Self: 'a;
+
unsafe fn read_guard(&self) -> Self::ReadGuard<'_> {
self.child().read_guard()
}
+
+ unsafe fn data_ref(&self) -> Self::DataRef<'_> {
+ self.child().data_ref()
+ }
}
unsafe impl<L: OwnedLockable> OwnedLockable for BoxedLockCollection<L> {}
@@ -352,13 +357,53 @@ impl<L: Lockable> BoxedLockCollection<L> {
// safety: we are checking for duplicates before returning
unsafe {
let this = Self::new_unchecked(data);
- if contains_duplicates(this.locks()) {
+ if ordered_contains_duplicates(this.locks()) {
return None;
}
Some(this)
}
}
+ pub fn scoped_lock<R>(&self, key: impl Keyable, f: impl Fn(L::DataMut<'_>) -> R) -> R {
+ unsafe {
+ // safety: we have the thread key
+ self.raw_lock();
+
+ // safety: the data was just locked
+ let r = f(self.data_mut());
+
+ // safety: the collection is still locked
+ self.raw_unlock();
+
+ drop(key); // ensure the key stays alive long enough
+
+ r
+ }
+ }
+
+ pub fn scoped_try_lock<Key: Keyable, R>(
+ &self,
+ key: Key,
+ f: impl Fn(L::DataMut<'_>) -> R,
+ ) -> Result<R, Key> {
+ unsafe {
+ // safety: we have the thread key
+ if !self.raw_try_lock() {
+ return Err(key);
+ }
+
+ // safety: we just locked the collection
+ let r = f(self.data_mut());
+
+ // safety: the collection is still locked
+ self.raw_unlock();
+
+ drop(key); // ensures the key stays valid long enough
+
+ Ok(r)
+ }
+ }
+
/// Locks the collection
///
/// This function returns a guard that can be used to access the underlying
@@ -378,10 +423,8 @@ impl<L: Lockable> BoxedLockCollection<L> {
/// *guard.0 += 1;
/// *guard.1 = "1";
/// ```
- pub fn lock<'g, 'key: 'g, Key: Keyable + 'key>(
- &'g self,
- key: Key,
- ) -> LockGuard<'key, L::Guard<'g>, Key> {
+ #[must_use]
+ pub fn lock(&self, key: ThreadKey) -> LockGuard<L::Guard<'_>> {
unsafe {
// safety: we have the thread key
self.raw_lock();
@@ -390,7 +433,6 @@ impl<L: Lockable> BoxedLockCollection<L> {
// safety: we've already acquired the lock
guard: self.child().guard(),
key,
- _phantom: PhantomData,
}
}
}
@@ -424,10 +466,7 @@ impl<L: Lockable> BoxedLockCollection<L> {
/// };
///
/// ```
- pub fn try_lock<'g, 'key: 'g, Key: Keyable + 'key>(
- &'g self,
- key: Key,
- ) -> Result<LockGuard<'key, L::Guard<'g>, Key>, Key> {
+ pub fn try_lock(&self, key: ThreadKey) -> Result<LockGuard<L::Guard<'_>>, ThreadKey> {
let guard = unsafe {
if !self.raw_try_lock() {
return Err(key);
@@ -437,11 +476,7 @@ impl<L: Lockable> BoxedLockCollection<L> {
self.child().guard()
};
- Ok(LockGuard {
- guard,
- key,
- _phantom: PhantomData,
- })
+ Ok(LockGuard { guard, key })
}
/// Unlocks the underlying lockable data type, returning the key that's
@@ -461,13 +496,53 @@ impl<L: Lockable> BoxedLockCollection<L> {
/// *guard.1 = "1";
/// let key = LockCollection::<(Mutex<i32>, Mutex<&str>)>::unlock(guard);
/// ```
- pub fn unlock<'key, Key: Keyable + 'key>(guard: LockGuard<'key, L::Guard<'_>, Key>) -> Key {
+ pub fn unlock(guard: LockGuard<L::Guard<'_>>) -> ThreadKey {
drop(guard.guard);
guard.key
}
}
impl<L: Sharable> BoxedLockCollection<L> {
+ pub fn scoped_read<R>(&self, key: impl Keyable, f: impl Fn(L::DataRef<'_>) -> R) -> R {
+ unsafe {
+ // safety: we have the thread key
+ self.raw_read();
+
+ // safety: the data was just locked
+ let r = f(self.data_ref());
+
+ // safety: the collection is still locked
+ self.raw_unlock_read();
+
+ drop(key); // ensure the key stays alive long enough
+
+ r
+ }
+ }
+
+ pub fn scoped_try_read<Key: Keyable, R>(
+ &self,
+ key: Key,
+ f: impl Fn(L::DataRef<'_>) -> R,
+ ) -> Result<R, Key> {
+ unsafe {
+ // safety: we have the thread key
+ if !self.raw_try_read() {
+ return Err(key);
+ }
+
+ // safety: we just locked the collection
+ let r = f(self.data_ref());
+
+ // safety: the collection is still locked
+ self.raw_unlock_read();
+
+ drop(key); // ensures the key stays valid long enough
+
+ Ok(r)
+ }
+ }
+
/// Locks the collection, so that other threads can still read from it
///
/// This function returns a guard that can be used to access the underlying
@@ -487,10 +562,8 @@ impl<L: Sharable> BoxedLockCollection<L> {
/// assert_eq!(*guard.0, 0);
/// assert_eq!(*guard.1, "");
/// ```
- pub fn read<'g, 'key: 'g, Key: Keyable + 'key>(
- &'g self,
- key: Key,
- ) -> LockGuard<'key, L::ReadGuard<'g>, Key> {
+ #[must_use]
+ pub fn read(&self, key: ThreadKey) -> LockGuard<L::ReadGuard<'_>> {
unsafe {
// safety: we have the thread key
self.raw_read();
@@ -499,7 +572,6 @@ impl<L: Sharable> BoxedLockCollection<L> {
// safety: we've already acquired the lock
guard: self.child().read_guard(),
key,
- _phantom: PhantomData,
}
}
}
@@ -534,10 +606,7 @@ impl<L: Sharable> BoxedLockCollection<L> {
/// };
///
/// ```
- pub fn try_read<'g, 'key: 'g, Key: Keyable + 'key>(
- &'g self,
- key: Key,
- ) -> Result<LockGuard<'key, L::ReadGuard<'g>, Key>, Key> {
+ pub fn try_read(&self, key: ThreadKey) -> Result<LockGuard<L::ReadGuard<'_>>, ThreadKey> {
let guard = unsafe {
// safety: we have the thread key
if !self.raw_try_read() {
@@ -548,11 +617,7 @@ impl<L: Sharable> BoxedLockCollection<L> {
self.child().read_guard()
};
- Ok(LockGuard {
- guard,
- key,
- _phantom: PhantomData,
- })
+ Ok(LockGuard { guard, key })
}
/// Unlocks the underlying lockable data type, returning the key that's
@@ -570,9 +635,7 @@ impl<L: Sharable> BoxedLockCollection<L> {
/// let mut guard = lock.read(key);
/// let key = LockCollection::<(RwLock<i32>, RwLock<&str>)>::unlock_read(guard);
/// ```
- pub fn unlock_read<'key, Key: Keyable + 'key>(
- guard: LockGuard<'key, L::ReadGuard<'_>, Key>,
- ) -> Key {
+ pub fn unlock_read(guard: LockGuard<L::ReadGuard<'_>>) -> ThreadKey {
drop(guard.guard);
guard.key
}
@@ -635,7 +698,6 @@ mod tests {
.into_iter()
.collect();
let guard = collection.lock(key);
- // TODO impl PartialEq<T> for MutexRef<T>
assert_eq!(*guard[0], "foo");
assert_eq!(*guard[1], "bar");
assert_eq!(*guard[2], "baz");
@@ -647,7 +709,6 @@ mod tests {
let collection =
BoxedLockCollection::from([Mutex::new("foo"), Mutex::new("bar"), Mutex::new("baz")]);
let guard = collection.lock(key);
- // TODO impl PartialEq<T> for MutexRef<T>
assert_eq!(*guard[0], "foo");
assert_eq!(*guard[1], "bar");
assert_eq!(*guard[2], "baz");
@@ -666,7 +727,7 @@ mod tests {
let mut key = ThreadKey::get().unwrap();
let collection = BoxedLockCollection::new([Mutex::new(0), Mutex::new(1), Mutex::new(2)]);
for (i, mutex) in (&collection).into_iter().enumerate() {
- assert_eq!(*mutex.lock(&mut key), i);
+ mutex.scoped_lock(&mut key, |val| assert_eq!(*val, i))
}
}
@@ -675,7 +736,7 @@ mod tests {
let mut key = ThreadKey::get().unwrap();
let collection = BoxedLockCollection::new([Mutex::new(0), Mutex::new(1), Mutex::new(2)]);
for (i, mutex) in collection.iter().enumerate() {
- assert_eq!(*mutex.lock(&mut key), i);
+ mutex.scoped_lock(&mut key, |val| assert_eq!(*val, i))
}
}
@@ -704,11 +765,6 @@ mod tests {
}
#[test]
- fn contains_duplicates_empty() {
- assert!(!contains_duplicates(&[]))
- }
-
- #[test]
fn try_lock_works() {
let key = ThreadKey::get().unwrap();
let collection = BoxedLockCollection::new([Mutex::new(1), Mutex::new(2)]);
diff --git a/src/collection/guard.rs b/src/collection/guard.rs
index eea13ed..78d9895 100644
--- a/src/collection/guard.rs
+++ b/src/collection/guard.rs
@@ -2,41 +2,11 @@ use std::fmt::{Debug, Display};
use std::hash::Hash;
use std::ops::{Deref, DerefMut};
-use crate::key::Keyable;
-
use super::LockGuard;
-#[mutants::skip] // it's hard to get two guards safely
-#[cfg(not(tarpaulin_include))]
-impl<Guard: PartialEq, Key: Keyable> PartialEq for LockGuard<'_, Guard, Key> {
- fn eq(&self, other: &Self) -> bool {
- self.guard.eq(&other.guard)
- }
-}
-
-#[mutants::skip] // it's hard to get two guards safely
-#[cfg(not(tarpaulin_include))]
-impl<Guard: PartialOrd, Key: Keyable> PartialOrd for LockGuard<'_, Guard, Key> {
- fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
- self.guard.partial_cmp(&other.guard)
- }
-}
-
-#[mutants::skip] // it's hard to get two guards safely
-#[cfg(not(tarpaulin_include))]
-impl<Guard: Eq, Key: Keyable> Eq for LockGuard<'_, Guard, Key> {}
-
-#[mutants::skip] // it's hard to get two guards safely
-#[cfg(not(tarpaulin_include))]
-impl<Guard: Ord, Key: Keyable> Ord for LockGuard<'_, Guard, Key> {
- fn cmp(&self, other: &Self) -> std::cmp::Ordering {
- self.guard.cmp(&other.guard)
- }
-}
-
#[mutants::skip] // hashing involves RNG and is hard to test
#[cfg(not(tarpaulin_include))]
-impl<Guard: Hash, Key: Keyable> Hash for LockGuard<'_, Guard, Key> {
+impl<Guard: Hash> Hash for LockGuard<Guard> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.guard.hash(state)
}
@@ -44,19 +14,19 @@ impl<Guard: Hash, Key: Keyable> Hash for LockGuard<'_, Guard, Key> {
#[mutants::skip]
#[cfg(not(tarpaulin_include))]
-impl<Guard: Debug, Key: Keyable> Debug for LockGuard<'_, Guard, Key> {
+impl<Guard: Debug> Debug for LockGuard<Guard> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Debug::fmt(&**self, f)
}
}
-impl<Guard: Display, Key: Keyable> Display for LockGuard<'_, Guard, Key> {
+impl<Guard: Display> Display for LockGuard<Guard> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Display::fmt(&**self, f)
}
}
-impl<Guard, Key: Keyable> Deref for LockGuard<'_, Guard, Key> {
+impl<Guard> Deref for LockGuard<Guard> {
type Target = Guard;
fn deref(&self) -> &Self::Target {
@@ -64,19 +34,19 @@ impl<Guard, Key: Keyable> Deref for LockGuard<'_, Guard, Key> {
}
}
-impl<Guard, Key: Keyable> DerefMut for LockGuard<'_, Guard, Key> {
+impl<Guard> DerefMut for LockGuard<Guard> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.guard
}
}
-impl<Guard, Key: Keyable> AsRef<Guard> for LockGuard<'_, Guard, Key> {
+impl<Guard> AsRef<Guard> for LockGuard<Guard> {
fn as_ref(&self) -> &Guard {
&self.guard
}
}
-impl<Guard, Key: Keyable> AsMut<Guard> for LockGuard<'_, Guard, Key> {
+impl<Guard> AsMut<Guard> for LockGuard<Guard> {
fn as_mut(&mut self) -> &mut Guard {
&mut self.guard
}
@@ -97,56 +67,53 @@ mod tests {
#[test]
fn deref_mut_works() {
- let mut key = ThreadKey::get().unwrap();
+ let key = ThreadKey::get().unwrap();
let locks = (Mutex::new(1), Mutex::new(2));
let lock = LockCollection::new_ref(&locks);
- let mut guard = lock.lock(&mut key);
+ let mut guard = lock.lock(key);
*guard.0 = 3;
- drop(guard);
+ let key = LockCollection::<(Mutex<_>, Mutex<_>)>::unlock(guard);
- let guard = locks.0.lock(&mut key);
+ let guard = locks.0.lock(key);
assert_eq!(*guard, 3);
- drop(guard);
+ let key = Mutex::unlock(guard);
- let guard = locks.1.lock(&mut key);
+ let guard = locks.1.lock(key);
assert_eq!(*guard, 2);
- drop(guard);
}
#[test]
fn as_ref_works() {
- let mut key = ThreadKey::get().unwrap();
+ let key = ThreadKey::get().unwrap();
let locks = (Mutex::new(1), Mutex::new(2));
let lock = LockCollection::new_ref(&locks);
- let mut guard = lock.lock(&mut key);
+ let mut guard = lock.lock(key);
*guard.0 = 3;
- drop(guard);
+ let key = LockCollection::<(Mutex<_>, Mutex<_>)>::unlock(guard);
- let guard = locks.0.lock(&mut key);
+ let guard = locks.0.lock(key);
assert_eq!(guard.as_ref(), &3);
- drop(guard);
+ let key = Mutex::unlock(guard);
- let guard = locks.1.lock(&mut key);
+ let guard = locks.1.lock(key);
assert_eq!(guard.as_ref(), &2);
- drop(guard);
}
#[test]
fn as_mut_works() {
- let mut key = ThreadKey::get().unwrap();
+ let key = ThreadKey::get().unwrap();
let locks = (Mutex::new(1), Mutex::new(2));
let lock = LockCollection::new_ref(&locks);
- let mut guard = lock.lock(&mut key);
+ let mut guard = lock.lock(key);
let guard_mut = guard.as_mut();
*guard_mut.0 = 3;
- drop(guard);
+ let key = LockCollection::<(Mutex<_>, Mutex<_>)>::unlock(guard);
- let guard = locks.0.lock(&mut key);
+ let guard = locks.0.lock(key);
assert_eq!(guard.as_ref(), &3);
- drop(guard);
+ let key = Mutex::unlock(guard);
- let guard = locks.1.lock(&mut key);
+ let guard = locks.1.lock(key);
assert_eq!(guard.as_ref(), &2);
- drop(guard);
}
}
diff --git a/src/collection/owned.rs b/src/collection/owned.rs
index c345b43..b9cf313 100644
--- a/src/collection/owned.rs
+++ b/src/collection/owned.rs
@@ -1,57 +1,47 @@
-use std::marker::PhantomData;
-
use crate::lockable::{
Lockable, LockableGetMut, LockableIntoInner, OwnedLockable, RawLock, Sharable,
};
-use crate::Keyable;
+use crate::{Keyable, ThreadKey};
use super::{utils, LockGuard, OwnedLockCollection};
-#[mutants::skip] // it's hard to test individual locks in an OwnedLockCollection
-#[cfg(not(tarpaulin_include))]
-fn get_locks<L: Lockable>(data: &L) -> Vec<&dyn RawLock> {
- let mut locks = Vec::new();
- data.get_ptrs(&mut locks);
- locks
-}
-
unsafe impl<L: Lockable> RawLock for OwnedLockCollection<L> {
#[mutants::skip] // this should never run
#[cfg(not(tarpaulin_include))]
fn poison(&self) {
- let locks = get_locks(&self.data);
+ let locks = utils::get_locks_unsorted(&self.data);
for lock in locks {
lock.poison();
}
}
unsafe fn raw_lock(&self) {
- utils::ordered_lock(&get_locks(&self.data))
+ utils::ordered_lock(&utils::get_locks_unsorted(&self.data))
}
unsafe fn raw_try_lock(&self) -> bool {
- let locks = get_locks(&self.data);
+ let locks = utils::get_locks_unsorted(&self.data);
utils::ordered_try_lock(&locks)
}
unsafe fn raw_unlock(&self) {
- let locks = get_locks(&self.data);
+ let locks = utils::get_locks_unsorted(&self.data);
for lock in locks {
lock.raw_unlock();
}
}
unsafe fn raw_read(&self) {
- utils::ordered_read(&get_locks(&self.data))
+ utils::ordered_read(&utils::get_locks_unsorted(&self.data))
}
unsafe fn raw_try_read(&self) -> bool {
- let locks = get_locks(&self.data);
+ let locks = utils::get_locks_unsorted(&self.data);
utils::ordered_try_read(&locks)
}
unsafe fn raw_unlock_read(&self) {
- let locks = get_locks(&self.data);
+ let locks = utils::get_locks_unsorted(&self.data);
for lock in locks {
lock.raw_unlock_read();
}
@@ -64,6 +54,11 @@ unsafe impl<L: Lockable> Lockable for OwnedLockCollection<L> {
where
Self: 'g;
+ type DataMut<'a>
+ = L::DataMut<'a>
+ where
+ Self: 'a;
+
#[mutants::skip] // It's hard to test lkocks in an OwnedLockCollection, because they're owned
#[cfg(not(tarpaulin_include))]
fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
@@ -73,6 +68,10 @@ unsafe impl<L: Lockable> Lockable for OwnedLockCollection<L> {
unsafe fn guard(&self) -> Self::Guard<'_> {
self.data.guard()
}
+
+ unsafe fn data_mut(&self) -> Self::DataMut<'_> {
+ self.data.data_mut()
+ }
}
impl<L: LockableGetMut> LockableGetMut for OwnedLockCollection<L> {
@@ -100,9 +99,18 @@ unsafe impl<L: Sharable> Sharable for OwnedLockCollection<L> {
where
Self: 'g;
+ type DataRef<'a>
+ = L::DataRef<'a>
+ where
+ Self: 'a;
+
unsafe fn read_guard(&self) -> Self::ReadGuard<'_> {
self.data.read_guard()
}
+
+ unsafe fn data_ref(&self) -> Self::DataRef<'_> {
+ self.data.data_ref()
+ }
}
unsafe impl<L: OwnedLockable> OwnedLockable for OwnedLockCollection<L> {}
@@ -177,6 +185,46 @@ impl<L: OwnedLockable> OwnedLockCollection<L> {
Self { data }
}
+ pub fn scoped_lock<R>(&self, key: impl Keyable, f: impl Fn(L::DataMut<'_>) -> R) -> R {
+ unsafe {
+ // safety: we have the thread key
+ self.raw_lock();
+
+ // safety: the data was just locked
+ let r = f(self.data_mut());
+
+ // safety: the collection is still locked
+ self.raw_unlock();
+
+ drop(key); // ensure the key stays alive long enough
+
+ r
+ }
+ }
+
+ pub fn scoped_try_lock<Key: Keyable, R>(
+ &self,
+ key: Key,
+ f: impl Fn(L::DataMut<'_>) -> R,
+ ) -> Result<R, Key> {
+ unsafe {
+ // safety: we have the thread key
+ if !self.raw_try_lock() {
+ return Err(key);
+ }
+
+ // safety: we just locked the collection
+ let r = f(self.data_mut());
+
+ // safety: the collection is still locked
+ self.raw_unlock();
+
+ drop(key); // ensures the key stays valid long enough
+
+ Ok(r)
+ }
+ }
+
/// Locks the collection
///
/// This function returns a guard that can be used to access the underlying
@@ -197,10 +245,7 @@ impl<L: OwnedLockable> OwnedLockCollection<L> {
/// *guard.0 += 1;
/// *guard.1 = "1";
/// ```
- pub fn lock<'g, 'key: 'g, Key: Keyable + 'key>(
- &'g self,
- key: Key,
- ) -> LockGuard<'key, L::Guard<'g>, Key> {
+ pub fn lock(&self, key: ThreadKey) -> LockGuard<L::Guard<'_>> {
let guard = unsafe {
// safety: we have the thread key, and these locks happen in a
// predetermined order
@@ -210,11 +255,7 @@ impl<L: OwnedLockable> OwnedLockCollection<L> {
self.data.guard()
};
- LockGuard {
- guard,
- key,
- _phantom: PhantomData,
- }
+ LockGuard { guard, key }
}
/// Attempts to lock the without blocking.
@@ -247,10 +288,7 @@ impl<L: OwnedLockable> OwnedLockCollection<L> {
/// };
///
/// ```
- pub fn try_lock<'g, 'key: 'g, Key: Keyable + 'key>(
- &'g self,
- key: Key,
- ) -> Result<LockGuard<'key, L::Guard<'g>, Key>, Key> {
+ pub fn try_lock(&self, key: ThreadKey) -> Result<LockGuard<L::Guard<'_>>, ThreadKey> {
let guard = unsafe {
if !self.raw_try_lock() {
return Err(key);
@@ -260,11 +298,7 @@ impl<L: OwnedLockable> OwnedLockCollection<L> {
self.data.guard()
};
- Ok(LockGuard {
- guard,
- key,
- _phantom: PhantomData,
- })
+ Ok(LockGuard { guard, key })
}
/// Unlocks the underlying lockable data type, returning the key that's
@@ -286,15 +320,53 @@ impl<L: OwnedLockable> OwnedLockCollection<L> {
/// let key = OwnedLockCollection::<(Mutex<i32>, Mutex<&str>)>::unlock(guard);
/// ```
#[allow(clippy::missing_const_for_fn)]
- pub fn unlock<'g, 'key: 'g, Key: Keyable + 'key>(
- guard: LockGuard<'key, L::Guard<'g>, Key>,
- ) -> Key {
+ pub fn unlock(guard: LockGuard<L::Guard<'_>>) -> ThreadKey {
drop(guard.guard);
guard.key
}
}
impl<L: Sharable> OwnedLockCollection<L> {
+ pub fn scoped_read<R>(&self, key: impl Keyable, f: impl Fn(L::DataRef<'_>) -> R) -> R {
+ unsafe {
+ // safety: we have the thread key
+ self.raw_read();
+
+ // safety: the data was just locked
+ let r = f(self.data_ref());
+
+ // safety: the collection is still locked
+ self.raw_unlock_read();
+
+ drop(key); // ensure the key stays alive long enough
+
+ r
+ }
+ }
+
+ pub fn scoped_try_read<Key: Keyable, R>(
+ &self,
+ key: Key,
+ f: impl Fn(L::DataRef<'_>) -> R,
+ ) -> Result<R, Key> {
+ unsafe {
+ // safety: we have the thread key
+ if !self.raw_try_read() {
+ return Err(key);
+ }
+
+ // safety: we just locked the collection
+ let r = f(self.data_ref());
+
+ // safety: the collection is still locked
+ self.raw_unlock_read();
+
+ drop(key); // ensures the key stays valid long enough
+
+ Ok(r)
+ }
+ }
+
/// Locks the collection, so that other threads can still read from it
///
/// This function returns a guard that can be used to access the underlying
@@ -315,10 +387,7 @@ impl<L: Sharable> OwnedLockCollection<L> {
/// assert_eq!(*guard.0, 0);
/// assert_eq!(*guard.1, "");
/// ```
- pub fn read<'g, 'key: 'g, Key: Keyable + 'key>(
- &'g self,
- key: Key,
- ) -> LockGuard<'key, L::ReadGuard<'g>, Key> {
+ pub fn read(&self, key: ThreadKey) -> LockGuard<L::ReadGuard<'_>> {
unsafe {
// safety: we have the thread key
self.raw_read();
@@ -327,7 +396,6 @@ impl<L: Sharable> OwnedLockCollection<L> {
// safety: we've already acquired the lock
guard: self.data.read_guard(),
key,
- _phantom: PhantomData,
}
}
}
@@ -355,33 +423,26 @@ impl<L: Sharable> OwnedLockCollection<L> {
/// let lock = OwnedLockCollection::new(data);
///
/// match lock.try_read(key) {
- /// Some(mut guard) => {
+ /// Ok(mut guard) => {
/// assert_eq!(*guard.0, 5);
/// assert_eq!(*guard.1, "6");
/// },
- /// None => unreachable!(),
+ /// Err(_) => unreachable!(),
/// };
///
/// ```
- pub fn try_read<'g, 'key: 'g, Key: Keyable + 'key>(
- &'g self,
- key: Key,
- ) -> Option<LockGuard<'key, L::ReadGuard<'g>, Key>> {
+ pub fn try_read(&self, key: ThreadKey) -> Result<LockGuard<L::ReadGuard<'_>>, ThreadKey> {
let guard = unsafe {
// safety: we have the thread key
if !self.raw_try_read() {
- return None;
+ return Err(key);
}
// safety: we've acquired the locks
self.data.read_guard()
};
- Some(LockGuard {
- guard,
- key,
- _phantom: PhantomData,
- })
+ Ok(LockGuard { guard, key })
}
/// Unlocks the underlying lockable data type, returning the key that's
@@ -401,9 +462,7 @@ impl<L: Sharable> OwnedLockCollection<L> {
/// let key = OwnedLockCollection::<(RwLock<i32>, RwLock<&str>)>::unlock_read(guard);
/// ```
#[allow(clippy::missing_const_for_fn)]
- pub fn unlock_read<'g, 'key: 'g, Key: Keyable + 'key>(
- guard: LockGuard<'key, L::ReadGuard<'g>, Key>,
- ) -> Key {
+ pub fn unlock_read(guard: LockGuard<L::ReadGuard<'_>>) -> ThreadKey {
drop(guard.guard);
guard.key
}
@@ -495,7 +554,90 @@ impl<L: LockableIntoInner> OwnedLockCollection<L> {
#[cfg(test)]
mod tests {
use super::*;
- use crate::Mutex;
+ use crate::{Mutex, ThreadKey};
+
+ #[test]
+ fn get_mut_applies_changes() {
+ let key = ThreadKey::get().unwrap();
+ let mut collection = OwnedLockCollection::new([Mutex::new("foo"), Mutex::new("bar")]);
+ assert_eq!(*collection.get_mut()[0], "foo");
+ assert_eq!(*collection.get_mut()[1], "bar");
+ *collection.get_mut()[0] = "baz";
+
+ let guard = collection.lock(key);
+ assert_eq!(*guard[0], "baz");
+ assert_eq!(*guard[1], "bar");
+ }
+
+ #[test]
+ fn into_inner_works() {
+ let key = ThreadKey::get().unwrap();
+ let collection = OwnedLockCollection::from([Mutex::new("foo")]);
+ let mut guard = collection.lock(key);
+ *guard[0] = "bar";
+ drop(guard);
+
+ let array = collection.into_inner();
+ assert_eq!(array.len(), 1);
+ assert_eq!(array[0], "bar");
+ }
+
+ #[test]
+ fn from_into_iter_is_correct() {
+ let array = [Mutex::new(0), Mutex::new(1), Mutex::new(2), Mutex::new(3)];
+ let mut collection: OwnedLockCollection<Vec<Mutex<usize>>> = array.into_iter().collect();
+ assert_eq!(collection.get_mut().len(), 4);
+ for (i, lock) in collection.into_iter().enumerate() {
+ assert_eq!(lock.into_inner(), i);
+ }
+ }
+
+ #[test]
+ fn from_iter_is_correct() {
+ let array = [Mutex::new(0), Mutex::new(1), Mutex::new(2), Mutex::new(3)];
+ let mut collection: OwnedLockCollection<Vec<Mutex<usize>>> = array.into_iter().collect();
+ let collection: &mut Vec<_> = collection.as_mut();
+ assert_eq!(collection.len(), 4);
+ for (i, lock) in collection.iter_mut().enumerate() {
+ assert_eq!(*lock.get_mut(), i);
+ }
+ }
+
+ #[test]
+ fn try_lock_works_on_unlocked() {
+ let key = ThreadKey::get().unwrap();
+ let collection = OwnedLockCollection::new((Mutex::new(0), Mutex::new(1)));
+ let guard = collection.try_lock(key).unwrap();
+ assert_eq!(*guard.0, 0);
+ assert_eq!(*guard.1, 1);
+ }
+
+ #[test]
+ fn try_lock_fails_on_locked() {
+ let key = ThreadKey::get().unwrap();
+ let collection = OwnedLockCollection::new((Mutex::new(0), Mutex::new(1)));
+
+ std::thread::scope(|s| {
+ s.spawn(|| {
+ let key = ThreadKey::get().unwrap();
+ #[allow(unused)]
+ let guard = collection.lock(key);
+ std::mem::forget(guard);
+ });
+ });
+
+ assert!(collection.try_lock(key).is_err());
+ }
+
+ #[test]
+ fn default_works() {
+ type MyCollection = OwnedLockCollection<(Mutex<i32>, Mutex<Option<i32>>, Mutex<String>)>;
+ let collection = MyCollection::default();
+ let inner = collection.into_inner();
+ assert_eq!(inner.0, 0);
+ assert_eq!(inner.1, None);
+ assert_eq!(inner.2, String::new());
+ }
#[test]
fn can_be_extended() {
diff --git a/src/collection/ref.rs b/src/collection/ref.rs
index c86f298..b68b72f 100644
--- a/src/collection/ref.rs
+++ b/src/collection/ref.rs
@@ -1,32 +1,11 @@
use std::fmt::Debug;
-use std::marker::PhantomData;
use crate::lockable::{Lockable, OwnedLockable, RawLock, Sharable};
-use crate::Keyable;
+use crate::{Keyable, ThreadKey};
+use super::utils::{get_locks, ordered_contains_duplicates};
use super::{utils, LockGuard, RefLockCollection};
-#[must_use]
-pub fn get_locks<L: Lockable>(data: &L) -> Vec<&dyn RawLock> {
- let mut locks = Vec::new();
- data.get_ptrs(&mut locks);
- locks.sort_by_key(|lock| &raw const **lock);
- locks
-}
-
-/// returns `true` if the sorted list contains a duplicate
-#[must_use]
-fn contains_duplicates(l: &[&dyn RawLock]) -> bool {
- if l.is_empty() {
- // Return early to prevent panic in the below call to `windows`
- return false;
- }
-
- l.windows(2)
- // NOTE: addr_eq is necessary because eq would also compare the v-table pointers
- .any(|window| std::ptr::addr_eq(window[0], window[1]))
-}
-
impl<'a, L> IntoIterator for &'a RefLockCollection<'a, L>
where
&'a L: IntoIterator,
@@ -83,6 +62,11 @@ unsafe impl<L: Lockable> Lockable for RefLockCollection<'_, L> {
where
Self: 'g;
+ type DataMut<'a>
+ = L::DataMut<'a>
+ where
+ Self: 'a;
+
fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
ptrs.extend_from_slice(&self.locks);
}
@@ -90,6 +74,10 @@ unsafe impl<L: Lockable> Lockable for RefLockCollection<'_, L> {
unsafe fn guard(&self) -> Self::Guard<'_> {
self.data.guard()
}
+
+ unsafe fn data_mut(&self) -> Self::DataMut<'_> {
+ self.data.data_mut()
+ }
}
unsafe impl<L: Sharable> Sharable for RefLockCollection<'_, L> {
@@ -98,9 +86,18 @@ unsafe impl<L: Sharable> Sharable for RefLockCollection<'_, L> {
where
Self: 'g;
+ type DataRef<'a>
+ = L::DataRef<'a>
+ where
+ Self: 'a;
+
unsafe fn read_guard(&self) -> Self::ReadGuard<'_> {
self.data.read_guard()
}
+
+ unsafe fn data_ref(&self) -> Self::DataRef<'_> {
+ self.data.data_ref()
+ }
}
impl<T, L: AsRef<T>> AsRef<T> for RefLockCollection<'_, L> {
@@ -230,13 +227,53 @@ impl<'a, L: Lockable> RefLockCollection<'a, L> {
#[must_use]
pub fn try_new(data: &'a L) -> Option<Self> {
let locks = get_locks(data);
- if contains_duplicates(&locks) {
+ if ordered_contains_duplicates(&locks) {
return None;
}
Some(Self { data, locks })
}
+ pub fn scoped_lock<R>(&self, key: impl Keyable, f: impl Fn(L::DataMut<'_>) -> R) -> R {
+ unsafe {
+ // safety: we have the thread key
+ self.raw_lock();
+
+ // safety: the data was just locked
+ let r = f(self.data_mut());
+
+ // safety: the collection is still locked
+ self.raw_unlock();
+
+ drop(key); // ensure the key stays alive long enough
+
+ r
+ }
+ }
+
+ pub fn scoped_try_lock<Key: Keyable, R>(
+ &self,
+ key: Key,
+ f: impl Fn(L::DataMut<'_>) -> R,
+ ) -> Result<R, Key> {
+ unsafe {
+ // safety: we have the thread key
+ if !self.raw_try_lock() {
+ return Err(key);
+ }
+
+ // safety: we just locked the collection
+ let r = f(self.data_mut());
+
+ // safety: the collection is still locked
+ self.raw_unlock();
+
+ drop(key); // ensures the key stays valid long enough
+
+ Ok(r)
+ }
+ }
+
/// Locks the collection
///
/// This function returns a guard that can be used to access the underlying
@@ -257,10 +294,8 @@ impl<'a, L: Lockable> RefLockCollection<'a, L> {
/// *guard.0 += 1;
/// *guard.1 = "1";
/// ```
- pub fn lock<'g, 'key: 'g, Key: Keyable + 'key>(
- &'g self,
- key: Key,
- ) -> LockGuard<'key, L::Guard<'g>, Key> {
+ #[must_use]
+ pub fn lock(&self, key: ThreadKey) -> LockGuard<L::Guard<'_>> {
let guard = unsafe {
// safety: we have the thread key
self.raw_lock();
@@ -269,11 +304,7 @@ impl<'a, L: Lockable> RefLockCollection<'a, L> {
self.data.guard()
};
- LockGuard {
- guard,
- key,
- _phantom: PhantomData,
- }
+ LockGuard { guard, key }
}
/// Attempts to lock the without blocking.
@@ -306,10 +337,7 @@ impl<'a, L: Lockable> RefLockCollection<'a, L> {
/// };
///
/// ```
- pub fn try_lock<'g, 'key: 'a, Key: Keyable + 'key>(
- &'g self,
- key: Key,
- ) -> Result<LockGuard<'key, L::Guard<'g>, Key>, Key> {
+ pub fn try_lock(&self, key: ThreadKey) -> Result<LockGuard<L::Guard<'_>>, ThreadKey> {
let guard = unsafe {
if !self.raw_try_lock() {
return Err(key);
@@ -319,11 +347,7 @@ impl<'a, L: Lockable> RefLockCollection<'a, L> {
self.data.guard()
};
- Ok(LockGuard {
- guard,
- key,
- _phantom: PhantomData,
- })
+ Ok(LockGuard { guard, key })
}
/// Unlocks the underlying lockable data type, returning the key that's
@@ -345,13 +369,53 @@ impl<'a, L: Lockable> RefLockCollection<'a, L> {
/// let key = RefLockCollection::<(Mutex<i32>, Mutex<&str>)>::unlock(guard);
/// ```
#[allow(clippy::missing_const_for_fn)]
- pub fn unlock<'g, 'key, Key: Keyable + 'key>(guard: LockGuard<'key, L::Guard<'g>, Key>) -> Key {
+ pub fn unlock(guard: LockGuard<L::Guard<'_>>) -> ThreadKey {
drop(guard.guard);
guard.key
}
}
-impl<'a, L: Sharable> RefLockCollection<'a, L> {
+impl<L: Sharable> RefLockCollection<'_, L> {
+ pub fn scoped_read<R>(&self, key: impl Keyable, f: impl Fn(L::DataRef<'_>) -> R) -> R {
+ unsafe {
+ // safety: we have the thread key
+ self.raw_read();
+
+ // safety: the data was just locked
+ let r = f(self.data_ref());
+
+ // safety: the collection is still locked
+ self.raw_unlock_read();
+
+ drop(key); // ensure the key stays alive long enough
+
+ r
+ }
+ }
+
+ pub fn scoped_try_read<Key: Keyable, R>(
+ &self,
+ key: Key,
+ f: impl Fn(L::DataRef<'_>) -> R,
+ ) -> Result<R, Key> {
+ unsafe {
+ // safety: we have the thread key
+ if !self.raw_try_read() {
+ return Err(key);
+ }
+
+ // safety: we just locked the collection
+ let r = f(self.data_ref());
+
+ // safety: the collection is still locked
+ self.raw_unlock_read();
+
+ drop(key); // ensures the key stays valid long enough
+
+ Ok(r)
+ }
+ }
+
/// Locks the collection, so that other threads can still read from it
///
/// This function returns a guard that can be used to access the underlying
@@ -372,10 +436,8 @@ impl<'a, L: Sharable> RefLockCollection<'a, L> {
/// assert_eq!(*guard.0, 0);
/// assert_eq!(*guard.1, "");
/// ```
- pub fn read<'g, 'key: 'g, Key: Keyable + 'key>(
- &'g self,
- key: Key,
- ) -> LockGuard<'key, L::ReadGuard<'g>, Key> {
+ #[must_use]
+ pub fn read(&self, key: ThreadKey) -> LockGuard<L::ReadGuard<'_>> {
unsafe {
// safety: we have the thread key
self.raw_read();
@@ -384,7 +446,6 @@ impl<'a, L: Sharable> RefLockCollection<'a, L> {
// safety: we've already acquired the lock
guard: self.data.read_guard(),
key,
- _phantom: PhantomData,
}
}
}
@@ -412,33 +473,26 @@ impl<'a, L: Sharable> RefLockCollection<'a, L> {
/// let lock = RefLockCollection::new(&data);
///
/// match lock.try_read(key) {
- /// Some(mut guard) => {
+ /// Ok(mut guard) => {
/// assert_eq!(*guard.0, 5);
/// assert_eq!(*guard.1, "6");
/// },
- /// None => unreachable!(),
+ /// Err(_) => unreachable!(),
/// };
///
/// ```
- pub fn try_read<'g, 'key: 'g, Key: Keyable + 'key>(
- &'g self,
- key: Key,
- ) -> Option<LockGuard<'key, L::ReadGuard<'g>, Key>> {
+ pub fn try_read(&self, key: ThreadKey) -> Result<LockGuard<L::ReadGuard<'_>>, ThreadKey> {
let guard = unsafe {
// safety: we have the thread key
if !self.raw_try_read() {
- return None;
+ return Err(key);
}
// safety: we've acquired the locks
self.data.read_guard()
};
- Some(LockGuard {
- guard,
- key,
- _phantom: PhantomData,
- })
+ Ok(LockGuard { guard, key })
}
/// Unlocks the underlying lockable data type, returning the key that's
@@ -458,9 +512,7 @@ impl<'a, L: Sharable> RefLockCollection<'a, L> {
/// let key = RefLockCollection::<(RwLock<i32>, RwLock<&str>)>::unlock_read(guard);
/// ```
#[allow(clippy::missing_const_for_fn)]
- pub fn unlock_read<'key: 'a, Key: Keyable + 'key>(
- guard: LockGuard<'key, L::ReadGuard<'a>, Key>,
- ) -> Key {
+ pub fn unlock_read(guard: LockGuard<L::ReadGuard<'_>>) -> ThreadKey {
drop(guard.guard);
guard.key
}
@@ -497,7 +549,7 @@ where
#[cfg(test)]
mod tests {
use super::*;
- use crate::{Mutex, ThreadKey};
+ use crate::{Mutex, RwLock, ThreadKey};
#[test]
fn non_duplicates_allowed() {
@@ -513,6 +565,85 @@ mod tests {
}
#[test]
+ fn try_lock_succeeds_for_unlocked_collection() {
+ let key = ThreadKey::get().unwrap();
+ let mutexes = [Mutex::new(24), Mutex::new(42)];
+ let collection = RefLockCollection::new(&mutexes);
+ let guard = collection.try_lock(key).unwrap();
+ assert_eq!(*guard[0], 24);
+ assert_eq!(*guard[1], 42);
+ }
+
+ #[test]
+ fn try_lock_fails_for_locked_collection() {
+ let key = ThreadKey::get().unwrap();
+ let mutexes = [Mutex::new(24), Mutex::new(42)];
+ let collection = RefLockCollection::new(&mutexes);
+
+ std::thread::scope(|s| {
+ s.spawn(|| {
+ let key = ThreadKey::get().unwrap();
+ let guard = mutexes[1].lock(key);
+ assert_eq!(*guard, 42);
+ std::mem::forget(guard);
+ });
+ });
+
+ let guard = collection.try_lock(key);
+ assert!(guard.is_err());
+ }
+
+ #[test]
+ fn try_read_succeeds_for_unlocked_collection() {
+ let key = ThreadKey::get().unwrap();
+ let mutexes = [RwLock::new(24), RwLock::new(42)];
+ let collection = RefLockCollection::new(&mutexes);
+ let guard = collection.try_read(key).unwrap();
+ assert_eq!(*guard[0], 24);
+ assert_eq!(*guard[1], 42);
+ }
+
+ #[test]
+ fn try_read_fails_for_locked_collection() {
+ let key = ThreadKey::get().unwrap();
+ let mutexes = [RwLock::new(24), RwLock::new(42)];
+ let collection = RefLockCollection::new(&mutexes);
+
+ std::thread::scope(|s| {
+ s.spawn(|| {
+ let key = ThreadKey::get().unwrap();
+ let guard = mutexes[1].write(key);
+ assert_eq!(*guard, 42);
+ std::mem::forget(guard);
+ });
+ });
+
+ let guard = collection.try_read(key);
+ assert!(guard.is_err());
+ }
+
+ #[test]
+ fn can_read_twice_on_different_threads() {
+ let key = ThreadKey::get().unwrap();
+ let mutexes = [RwLock::new(24), RwLock::new(42)];
+ let collection = RefLockCollection::new(&mutexes);
+
+ std::thread::scope(|s| {
+ s.spawn(|| {
+ let key = ThreadKey::get().unwrap();
+ let guard = collection.read(key);
+ assert_eq!(*guard[0], 24);
+ assert_eq!(*guard[1], 42);
+ std::mem::forget(guard);
+ });
+ });
+
+ let guard = collection.try_read(key).unwrap();
+ assert_eq!(*guard[0], 24);
+ assert_eq!(*guard[1], 42);
+ }
+
+ #[test]
fn works_in_collection() {
let key = ThreadKey::get().unwrap();
let mutex1 = Mutex::new(0);
diff --git a/src/collection/retry.rs b/src/collection/retry.rs
index 331b669..775ea29 100644
--- a/src/collection/retry.rs
+++ b/src/collection/retry.rs
@@ -1,24 +1,18 @@
use std::cell::Cell;
use std::collections::HashSet;
-use std::marker::PhantomData;
use crate::collection::utils;
use crate::handle_unwind::handle_unwind;
use crate::lockable::{
Lockable, LockableGetMut, LockableIntoInner, OwnedLockable, RawLock, Sharable,
};
-use crate::Keyable;
+use crate::{Keyable, ThreadKey};
-use super::utils::{attempt_to_recover_locks_from_panic, attempt_to_recover_reads_from_panic};
+use super::utils::{
+ attempt_to_recover_locks_from_panic, attempt_to_recover_reads_from_panic, get_locks_unsorted,
+};
use super::{LockGuard, RetryingLockCollection};
-/// Get all raw locks in the collection
-fn get_locks<L: Lockable>(data: &L) -> Vec<&dyn RawLock> {
- let mut locks = Vec::new();
- data.get_ptrs(&mut locks);
- locks
-}
-
/// Checks that a collection contains no duplicate references to a lock.
fn contains_duplicates<L: Lockable>(data: L) -> bool {
let mut locks = Vec::new();
@@ -40,14 +34,14 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
#[mutants::skip] // this should never run
#[cfg(not(tarpaulin_include))]
fn poison(&self) {
- let locks = get_locks(&self.data);
+ let locks = get_locks_unsorted(&self.data);
for lock in locks {
lock.poison();
}
}
unsafe fn raw_lock(&self) {
- let locks = get_locks(&self.data);
+ let locks = get_locks_unsorted(&self.data);
if locks.is_empty() {
// this probably prevents a panic later
@@ -109,7 +103,7 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
}
unsafe fn raw_try_lock(&self) -> bool {
- let locks = get_locks(&self.data);
+ let locks = get_locks_unsorted(&self.data);
if locks.is_empty() {
// this is an interesting case, but it doesn't give us access to
@@ -139,7 +133,7 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
}
unsafe fn raw_unlock(&self) {
- let locks = get_locks(&self.data);
+ let locks = get_locks_unsorted(&self.data);
for lock in locks {
lock.raw_unlock();
@@ -147,7 +141,7 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
}
unsafe fn raw_read(&self) {
- let locks = get_locks(&self.data);
+ let locks = get_locks_unsorted(&self.data);
if locks.is_empty() {
// this probably prevents a panic later
@@ -200,7 +194,7 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
}
unsafe fn raw_try_read(&self) -> bool {
- let locks = get_locks(&self.data);
+ let locks = get_locks_unsorted(&self.data);
if locks.is_empty() {
// this is an interesting case, but it doesn't give us access to
@@ -229,7 +223,7 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
}
unsafe fn raw_unlock_read(&self) {
- let locks = get_locks(&self.data);
+ let locks = get_locks_unsorted(&self.data);
for lock in locks {
lock.raw_unlock_read();
@@ -243,6 +237,11 @@ unsafe impl<L: Lockable> Lockable for RetryingLockCollection<L> {
where
Self: 'g;
+ type DataMut<'a>
+ = L::DataMut<'a>
+ where
+ Self: 'a;
+
fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
self.data.get_ptrs(ptrs)
}
@@ -250,6 +249,10 @@ unsafe impl<L: Lockable> Lockable for RetryingLockCollection<L> {
unsafe fn guard(&self) -> Self::Guard<'_> {
self.data.guard()
}
+
+ unsafe fn data_mut(&self) -> Self::DataMut<'_> {
+ self.data.data_mut()
+ }
}
unsafe impl<L: Sharable> Sharable for RetryingLockCollection<L> {
@@ -258,9 +261,18 @@ unsafe impl<L: Sharable> Sharable for RetryingLockCollection<L> {
where
Self: 'g;
+ type DataRef<'a>
+ = L::DataRef<'a>
+ where
+ Self: 'a;
+
unsafe fn read_guard(&self) -> Self::ReadGuard<'_> {
self.data.read_guard()
}
+
+ unsafe fn data_ref(&self) -> Self::DataRef<'_> {
+ self.data.data_ref()
+ }
}
unsafe impl<L: OwnedLockable> OwnedLockable for RetryingLockCollection<L> {}
@@ -516,6 +528,46 @@ impl<L: Lockable> RetryingLockCollection<L> {
(!contains_duplicates(&data)).then_some(Self { data })
}
+ pub fn scoped_lock<R>(&self, key: impl Keyable, f: impl Fn(L::DataMut<'_>) -> R) -> R {
+ unsafe {
+ // safety: we have the thread key
+ self.raw_lock();
+
+ // safety: the data was just locked
+ let r = f(self.data_mut());
+
+ // safety: the collection is still locked
+ self.raw_unlock();
+
+ drop(key); // ensure the key stays alive long enough
+
+ r
+ }
+ }
+
+ pub fn scoped_try_lock<Key: Keyable, R>(
+ &self,
+ key: Key,
+ f: impl Fn(L::DataMut<'_>) -> R,
+ ) -> Result<R, Key> {
+ unsafe {
+ // safety: we have the thread key
+ if !self.raw_try_lock() {
+ return Err(key);
+ }
+
+ // safety: we just locked the collection
+ let r = f(self.data_mut());
+
+ // safety: the collection is still locked
+ self.raw_unlock();
+
+ drop(key); // ensures the key stays valid long enough
+
+ Ok(r)
+ }
+ }
+
/// Locks the collection
///
/// This function returns a guard that can be used to access the underlying
@@ -536,10 +588,7 @@ impl<L: Lockable> RetryingLockCollection<L> {
/// *guard.0 += 1;
/// *guard.1 = "1";
/// ```
- pub fn lock<'g, 'key: 'g, Key: Keyable + 'key>(
- &'g self,
- key: Key,
- ) -> LockGuard<'key, L::Guard<'g>, Key> {
+ pub fn lock(&self, key: ThreadKey) -> LockGuard<L::Guard<'_>> {
unsafe {
// safety: we're taking the thread key
self.raw_lock();
@@ -548,7 +597,6 @@ impl<L: Lockable> RetryingLockCollection<L> {
// safety: we just locked the collection
guard: self.guard(),
key,
- _phantom: PhantomData,
}
}
}
@@ -583,10 +631,7 @@ impl<L: Lockable> RetryingLockCollection<L> {
/// };
///
/// ```
- pub fn try_lock<'g, 'key: 'g, Key: Keyable + 'key>(
- &'g self,
- key: Key,
- ) -> Result<LockGuard<'key, L::Guard<'g>, Key>, Key> {
+ pub fn try_lock(&self, key: ThreadKey) -> Result<LockGuard<L::Guard<'_>>, ThreadKey> {
unsafe {
// safety: we're taking the thread key
if self.raw_try_lock() {
@@ -594,7 +639,6 @@ impl<L: Lockable> RetryingLockCollection<L> {
// safety: we just succeeded in locking everything
guard: self.guard(),
key,
- _phantom: PhantomData,
})
} else {
Err(key)
@@ -620,13 +664,53 @@ impl<L: Lockable> RetryingLockCollection<L> {
/// *guard.1 = "1";
/// let key = RetryingLockCollection::<(Mutex<i32>, Mutex<&str>)>::unlock(guard);
/// ```
- pub fn unlock<'key, Key: Keyable + 'key>(guard: LockGuard<'key, L::Guard<'_>, Key>) -> Key {
+ pub fn unlock(guard: LockGuard<L::Guard<'_>>) -> ThreadKey {
drop(guard.guard);
guard.key
}
}
impl<L: Sharable> RetryingLockCollection<L> {
+ pub fn scoped_read<R>(&self, key: impl Keyable, f: impl Fn(L::DataRef<'_>) -> R) -> R {
+ unsafe {
+ // safety: we have the thread key
+ self.raw_read();
+
+ // safety: the data was just locked
+ let r = f(self.data_ref());
+
+ // safety: the collection is still locked
+ self.raw_unlock_read();
+
+ drop(key); // ensure the key stays alive long enough
+
+ r
+ }
+ }
+
+ pub fn scoped_try_read<Key: Keyable, R>(
+ &self,
+ key: Key,
+ f: impl Fn(L::DataRef<'_>) -> R,
+ ) -> Result<R, Key> {
+ unsafe {
+ // safety: we have the thread key
+ if !self.raw_try_read() {
+ return Err(key);
+ }
+
+ // safety: we just locked the collection
+ let r = f(self.data_ref());
+
+ // safety: the collection is still locked
+ self.raw_unlock_read();
+
+ drop(key); // ensures the key stays valid long enough
+
+ Ok(r)
+ }
+ }
+
/// Locks the collection, so that other threads can still read from it
///
/// This function returns a guard that can be used to access the underlying
@@ -647,10 +731,7 @@ impl<L: Sharable> RetryingLockCollection<L> {
/// assert_eq!(*guard.0, 0);
/// assert_eq!(*guard.1, "");
/// ```
- pub fn read<'g, 'key: 'g, Key: Keyable + 'key>(
- &'g self,
- key: Key,
- ) -> LockGuard<'key, L::ReadGuard<'g>, Key> {
+ pub fn read(&self, key: ThreadKey) -> LockGuard<L::ReadGuard<'_>> {
unsafe {
// safety: we're taking the thread key
self.raw_read();
@@ -659,7 +740,6 @@ impl<L: Sharable> RetryingLockCollection<L> {
// safety: we just locked the collection
guard: self.read_guard(),
key,
- _phantom: PhantomData,
}
}
}
@@ -687,25 +767,25 @@ impl<L: Sharable> RetryingLockCollection<L> {
/// let lock = RetryingLockCollection::new(data);
///
/// match lock.try_read(key) {
- /// Some(mut guard) => {
+ /// Ok(mut guard) => {
/// assert_eq!(*guard.0, 5);
/// assert_eq!(*guard.1, "6");
/// },
- /// None => unreachable!(),
+ /// Err(_) => unreachable!(),
/// };
///
/// ```
- pub fn try_read<'g, 'key: 'g, Key: Keyable + 'key>(
- &'g self,
- key: Key,
- ) -> Option<LockGuard<'key, L::ReadGuard<'g>, Key>> {
+ pub fn try_read(&self, key: ThreadKey) -> Result<LockGuard<L::ReadGuard<'_>>, ThreadKey> {
unsafe {
// safety: we're taking the thread key
- self.raw_try_lock().then(|| LockGuard {
+ if !self.raw_try_read() {
+ return Err(key);
+ }
+
+ Ok(LockGuard {
// safety: we just succeeded in locking everything
guard: self.read_guard(),
key,
- _phantom: PhantomData,
})
}
}
@@ -726,9 +806,7 @@ impl<L: Sharable> RetryingLockCollection<L> {
/// let mut guard = lock.read(key);
/// let key = RetryingLockCollection::<(RwLock<i32>, RwLock<&str>)>::unlock_read(guard);
/// ```
- pub fn unlock_read<'key, Key: Keyable + 'key>(
- guard: LockGuard<'key, L::ReadGuard<'_>, Key>,
- ) -> Key {
+ pub fn unlock_read(guard: LockGuard<L::ReadGuard<'_>>) -> ThreadKey {
drop(guard.guard);
guard.key
}
@@ -833,7 +911,7 @@ where
mod tests {
use super::*;
use crate::collection::BoxedLockCollection;
- use crate::{Mutex, RwLock, ThreadKey};
+ use crate::{LockCollection, Mutex, RwLock, ThreadKey};
#[test]
fn nonduplicate_lock_references_are_allowed() {
@@ -869,7 +947,6 @@ mod tests {
let rwlock1 = RwLock::new(0);
let rwlock2 = RwLock::new(0);
let collection = RetryingLockCollection::try_new([&rwlock1, &rwlock2]).unwrap();
- // TODO Poisonable::read
let guard = collection.read(key);
@@ -909,13 +986,14 @@ mod tests {
#[test]
fn lock_empty_lock_collection() {
- let mut key = ThreadKey::get().unwrap();
+ let key = ThreadKey::get().unwrap();
let collection: RetryingLockCollection<[RwLock<i32>; 0]> = RetryingLockCollection::new([]);
- let guard = collection.lock(&mut key);
+ let guard = collection.lock(key);
assert!(guard.len() == 0);
+ let key = LockCollection::<[RwLock<_>; 0]>::unlock(guard);
- let guard = collection.read(&mut key);
+ let guard = collection.read(key);
assert!(guard.len() == 0);
}
}
diff --git a/src/collection/utils.rs b/src/collection/utils.rs
index d6d50f4..1d96e5c 100644
--- a/src/collection/utils.rs
+++ b/src/collection/utils.rs
@@ -1,7 +1,35 @@
use std::cell::Cell;
use crate::handle_unwind::handle_unwind;
-use crate::lockable::RawLock;
+use crate::lockable::{Lockable, RawLock};
+
+#[must_use]
+pub fn get_locks<L: Lockable>(data: &L) -> Vec<&dyn RawLock> {
+ let mut locks = Vec::new();
+ data.get_ptrs(&mut locks);
+ locks.sort_by_key(|lock| &raw const **lock);
+ locks
+}
+
+#[must_use]
+pub fn get_locks_unsorted<L: Lockable>(data: &L) -> Vec<&dyn RawLock> {
+ let mut locks = Vec::new();
+ data.get_ptrs(&mut locks);
+ locks
+}
+
+/// returns `true` if the sorted list contains a duplicate
+#[must_use]
+pub fn ordered_contains_duplicates(l: &[&dyn RawLock]) -> bool {
+ if l.is_empty() {
+ // Return early to prevent panic in the below call to `windows`
+ return false;
+ }
+
+ l.windows(2)
+ // NOTE: addr_eq is necessary because eq would also compare the v-table pointers
+ .any(|window| std::ptr::addr_eq(window[0], window[1]))
+}
/// Lock a set of locks in the given order. It's UB to call this without a `ThreadKey`
pub unsafe fn ordered_lock(locks: &[&dyn RawLock]) {
@@ -115,3 +143,13 @@ pub unsafe fn attempt_to_recover_reads_from_panic(locked: &[&dyn RawLock]) {
|| locked.iter().for_each(|l| l.poison()),
)
}
+
+#[cfg(test)]
+mod tests {
+ use crate::collection::utils::ordered_contains_duplicates;
+
+ #[test]
+ fn empty_array_does_not_contain_duplicates() {
+ assert!(!ordered_contains_duplicates(&[]))
+ }
+}