summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--Cargo.toml5
-rw-r--r--happylock.md197
-rw-r--r--src/collection/boxed.rs102
-rw-r--r--src/collection/guard.rs63
-rw-r--r--src/collection/owned.rs3
-rw-r--r--src/collection/ref.rs2
-rw-r--r--src/collection/retry.rs111
-rw-r--r--src/collection/utils.rs46
-rw-r--r--src/key.rs6
-rw-r--r--src/lockable.rs4
-rw-r--r--src/mutex.rs30
-rw-r--r--src/mutex/guard.rs8
-rw-r--r--src/mutex/mutex.rs1
-rw-r--r--src/poisonable/error.rs4
-rw-r--r--src/poisonable/flag.rs1
-rw-r--r--src/poisonable/guard.rs8
-rw-r--r--src/poisonable/poisonable.rs1
-rw-r--r--src/rwlock/read_guard.rs8
-rw-r--r--src/rwlock/read_lock.rs3
-rw-r--r--src/rwlock/rwlock.rs1
-rw-r--r--src/rwlock/write_guard.rs8
-rw-r--r--src/rwlock/write_lock.rs1
-rw-r--r--tests/evil_mutex.rs26
-rw-r--r--tests/evil_rwlock.rs26
24 files changed, 530 insertions, 135 deletions
diff --git a/Cargo.toml b/Cargo.toml
index 59302c7..8b7036b 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "happylock"
-version = "0.4.2"
+version = "0.4.3"
authors = ["Mica White <botahamec@outlook.com>"]
edition = "2021"
rust-version = "1.82"
@@ -25,3 +25,6 @@ parking_lot = "0.12"
[features]
default = ["parking_lot"]
+
+[lints.rust]
+unexpected_cfgs = { level = "warn", check-cfg = ["cfg(tarpaulin_include)"] }
diff --git a/happylock.md b/happylock.md
index 973b819..c157e19 100644
--- a/happylock.md
+++ b/happylock.md
@@ -13,6 +13,96 @@ deadlock-free mutexes at compile-time
---
+## What is a Mutex?
+
+It gives mutually-exclusive access for one thread, to prevent races.
+
+```c
+static pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER;
+static int number = 1;
+
+void thread_1() {
+ pthread_mutex_lock(&mutex);
+ number = 6;
+ pthread_mutex_unlock(&mutex);
+}
+void thread_2() {
+ pthread_mutex_lock(&mutex);
+ printf("%d", number);
+ pthread_mutex_unlock(&mutex);
+}
+```
+
+This prevents the number from being modified while it is being printed, which would cause undefined behavior.
+
+---
+
+## Quick Refresh on Borrow Checker Rules
+
+1. You may have multiple immutable references to a value at a time
+2. If there is a mutable reference to a value, then it is the only reference
+3. Values cannot be moved while they are being referenced
+
+```rust
+let mut s = String::from("Hello, world!");
+let r1 = &s;
+let r2 = &s; // this is allowed because of #1
+let mr = &mut s; // illegal: rule #2
+drop(s); // also illegal: rule #3
+println!("{r1} {r2}");
+```
+
+---
+
+## Rust Mutexes
+
+In Rust, the mutex is safer than in C. The mutex protects the data itself rather than sections of code.
+
+```rust
+static NUMBER: Mutex<i32> = Mutex::new(1);
+
+fn thread_1() {
+ // MutexGuard grants access to the data inside of the mutex
+ // We cannot access this data without locking first
+ let mut number: MutexGuard<'_, i32> = NUMBER.lock().unwrap();
+ // MutexGuard is a smart pointer that we can modify directly
+ *number += 5;
+
+ // when the MutexGuard goes out of scope, it unlocks the mutex for you
+}
+```
+
+---
+
+## What is a deadlock?
+
+The lock function blocks until no other thread has acquired the lock.
+
+A simple way to cause deadlock is to lock twice on the same thread.
+
+```rust
+let number = Mutex::new(1);
+let guard1 = number.lock().unwrap();
+// now everybody has to wait until guard1 is dropped
+
+let guard2 = number.lock().unwrap(); // but wait, guard1 still exists
+// and we can't drop guard1, because we have to wait for this to finish
+// THIS IS A DEADLOCK! (in C, this causes undefined behavior)
+
+// we'll never get to do this
+println!("{guard1} {guard2}");
+```
+
+---
+
+## The Dining Philosophers Problem
+
+This is another example of deadlock, which is in the category of "deadly embrace"
+
+<img src="https://external-content.duckduckgo.com/iu/?u=https%3A%2F%2Ffiles.codingninjas.in%2Farticle_images%2Fdining-philosopher-problem-using-semaphores-1-1643507259.jpg&f=1&nofb=1&ipt=d8d17865fc8cb4c2e66454e6833d43e83f3965573786c2c54cff601bae03e8a3&ipo=images" height="480" />
+
+---
+
## Four Conditions for Deadlock
1. Mutual Exclusion
@@ -70,23 +160,6 @@ Acquiring a new lock requires releasing all currently-held locks.
---
-## Quick Refresh on Borrow Checker Rules
-
-1. You may have multiple immutable references to a value at a time
-2. If there is a mutable reference to a value, then it is the only reference
-3. Values cannot be moved while they are being referenced
-
-```rust
-let s = String::new("Hello, world!");
-let r1 = &s;
-let r2 = &s; // this is allowed because of #1
-let mr = &mut s; // illegal: rule #2
-drop(s); // also illegal: rule #3
-println!("{r1} {r2}");
-```
-
----
-
## How could an Operating System do this?
```c
@@ -130,7 +203,7 @@ fn main() {
## Performance: it's freaking fast
-`ThreadKey` is a mostly zero-cosst abstraction. It takes no memory at runtime. The only cost is getting and dropping the key.
+`ThreadKey` is a mostly zero-cost abstraction. It takes no memory at runtime. The only cost is getting and dropping the key.
`Mutex` is a thin wrapper around `parking_lot`. There's also a `spin` backend if needed for some reason.
@@ -295,7 +368,7 @@ This pattern will probably end eventually, but we should really avoid it, for pe
- I can't sort a tuple
- Don't return them sorted, silly
- Indexing the locks in the right order
- - Have `get_ptrs` return a `&dyn Lock`
+ - Have `get_ptrs` return a `&dyn RawLock`
- Start by locking everything
- Then call a separate `guard` method to create the guard
@@ -312,7 +385,7 @@ unsafe trait RawLock {
unsafe trait Lockable { // this is a bad name (LockGroup?)
type Guard<'g>;
- fn get_locks<'a>(&'a self, &mut Vec<&'a dyn Lock>);
+ fn get_locks<'a>(&'a self, &mut Vec<&'a dyn RawLock>);
unsafe fn guard<'g>(&'g self) -> Self::Guard<'g>;
}
```
@@ -416,10 +489,10 @@ unsafe trait RawLock {
unsafe fn unlock_read(&self);
}
-// update Lockable
-unsafe trait Lockable {
- // * snip *
+// This trait is used to indicate that reading is actually useful
+unsafe trait Sharable: Lockable {
type ReadGuard<'g> where Self: 'g;
+
unsafe fn read_guard<'g>(&'g self) -> Self::ReadGuard<'g>;
}
```
@@ -429,9 +502,6 @@ unsafe trait Lockable {
## Not every lock can be read tho
```rust
-// This trait is used to indicate that reading is actually useful
-unsafe trait Sharable: Lockable {}
-
impl<L: Sharable> OwnedLockable<L> {
pub fn read<..>(&'g self, key: Key) -> LockGuard<..> { /* ... */ }
@@ -467,7 +537,7 @@ Allows: `Poisonable<LockCollection>` and `LockCollection<Poisonable>`
---
-# `LockableGetMut` and `LockableIntoInner`
+# `LockableGetMut`
```rust
fn Mutex::<T>::get_mut(&mut self) -> &mut T // already exists in std
@@ -508,7 +578,6 @@ impl<A: LockableGetMut, B: LockableGetMut> LockableGetMut for (A, B) {
---
-
## OS Locks
- Using `parking_lot` makes the binary size much larger
@@ -519,14 +588,6 @@ impl<A: LockableGetMut, B: LockableGetMut> LockableGetMut for (A, B) {
---
-## Compile-Time Duplicate Checks
-
-As Mikhail keeps reminding me, it might be possible to do the duplicate detection at compile-time using a Bloom filter. This is something I'll have to try at some point.
-
-This would only be useful for `RetryingLockCollection`
-
----
-
# Convenience Methods
`Mutex::lock_swap`, `lock_clone`, etc would be cool
@@ -543,17 +604,9 @@ let cloned = mutex.lock_clone(); // deadlock
---
-# Try-Convenience Methods
-
-- `Mutex::try_swap`, `try_set` would not require a `ThreadKey`
-- They can't block the current thread (because `try`), and they can't block other threads (because they release the lock immediately)
-- Same probably applies to `try_clone` and `try_take`, but could be a problem if the `Clone` implementation tries to lock something else
-
----
-
## `LockCell`
-This would only allow methods tyo be called which immediately release the lock
+This would only allow methods to be called which immediately release the lock
This would never require a `ThreadKey`
@@ -614,9 +667,10 @@ A `Readonly` collection cannot be exclusively locked.
- Condvar and Barrier
- these have completely different deadlocking rules. someone else should figure this out
- LazyLock and OnceLock
- - can these even deadlock?
+ - These can deadlock, but it's very hard to do so accidentally
---
+
## Expanding Cyclic Wait
> ... sometimes you need to lock an object to read its value and determine what should be locked next... is there a way to address it?
@@ -624,7 +678,7 @@ A `Readonly` collection cannot be exclusively locked.
```rust
let guard = m1.lock(key);
if *guard == true {
- let key = Mutex::unlock(m);
+ let key = Mutex::unlock(guard);
let data = [&m1, &m2];
let collection = LockCollection::try_new(data).unwrap();
let guard = collection.lock(key);
@@ -660,7 +714,7 @@ This will be hard to do with tuples (but is not be impossible)
let key = ThreadKey::get().unwrap();
let collection: OwnedLockCollection<(Vec<i32>, Vec<String>);
let iterator: LockIterator<(Vec<i32>, Vec<String>)> = collection.locking_iter(key);
-let (guard, next: LockIterator<Vec<String>>) = collection.next();
+let (guard, next: LockIterator<Vec<String>>) = iterator.next();
unsafe trait IntoLockIterator: Lockable {
type Next: Lockable;
@@ -712,12 +766,57 @@ impl<Current, Rest> LockIterator<Current, Rest> {
We're going to be returning a lot of guards.
-The `ThreadKey` is held by the `LockIterator`.
+The `ThreadKey` needs to be held somewhere while the guards are active.
**How do we ensure that the `ThreadKey` is not used again until all of the guards are dropped?**
---
+## The Solution
+
+First, every guard needs to have an immutable reference to the `ThreadKey`.
+
+```rust
+// this is the MutexGuard that doesn't hold a ThreadKey
+// We'll modify it to hold an immutable reference to the ThreadKey
+// ThreadKey cannot be moved or mutably referenced during this lifetime
+struct MutexRef<'a, 'key, T, Key: Keyable + 'key>
+struct RwLockReadRef<'a, 'key, T, Key: Keyable + 'key>
+struct RwLockWriteRef<'a, 'key, T, Key: Keyable + 'key>
+```
+
+---
+
+## The Solution
+
+But where do we store the `ThreadKey`?
+
+```rust
+// This type will hold the ThreadKey
+struct LockIteratorGuard<'a, 'key, L, Key: Keyable + 'key>
+```
+
+---
+
+## The Solution
+
+Then `LockIterator` must hold a reference to the guard.
+
+```rust
+struct LockIterator<'a, Current, Rest = ()>
+```
+
+---
+
+## The Solution
+
+And we can get the first LockIterator by taking a mutable reference to the guard.
+
+```rust
+LockIteratorGuard::next<'a>(&'a mut self) -> LockIterator<'a, L::Next>
+```
+
+---
<!--_class: invert lead -->
## The End
diff --git a/src/collection/boxed.rs b/src/collection/boxed.rs
index 98d7632..72489bf 100644
--- a/src/collection/boxed.rs
+++ b/src/collection/boxed.rs
@@ -22,6 +22,7 @@ fn contains_duplicates(l: &[&dyn RawLock]) -> bool {
unsafe impl<L: Lockable> RawLock for BoxedLockCollection<L> {
#[mutants::skip] // this should never be called
+ #[cfg(not(tarpaulin_include))]
fn poison(&self) {
for lock in &self.locks {
lock.poison();
@@ -33,6 +34,7 @@ unsafe impl<L: Lockable> RawLock for BoxedLockCollection<L> {
}
unsafe fn raw_try_lock(&self) -> bool {
+ println!("{}", self.locks().len());
utils::ordered_try_lock(self.locks())
}
@@ -136,6 +138,7 @@ unsafe impl<L: Sync> Sync for BoxedLockCollection<L> {}
impl<L> Drop for BoxedLockCollection<L> {
#[mutants::skip] // i can't test for a memory leak
+ #[cfg(not(tarpaulin_include))]
fn drop(&mut self) {
unsafe {
// safety: this collection will never be locked again
@@ -155,6 +158,7 @@ impl<T, L: AsRef<T>> AsRef<T> for BoxedLockCollection<L> {
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<L: Debug> Debug for BoxedLockCollection<L> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct(stringify!(BoxedLockCollection))
@@ -197,8 +201,8 @@ impl<L> BoxedLockCollection<L> {
#[must_use]
pub fn into_child(mut self) -> L {
unsafe {
- // safety: this collection will never be locked again
- self.locks.clear();
+ // safety: this collection will never be used again
+ std::ptr::drop_in_place(&mut self.locks);
// safety: this was allocated using a box, and is now unique
let boxed: Box<UnsafeCell<L>> = Box::from_raw(self.data.cast_mut());
// to prevent a double free
@@ -621,7 +625,7 @@ where
#[cfg(test)]
mod tests {
use super::*;
- use crate::{Mutex, ThreadKey};
+ use crate::{Mutex, RwLock, ThreadKey};
#[test]
fn non_duplicates_allowed() {
@@ -637,6 +641,98 @@ mod tests {
}
#[test]
+ fn contains_duplicates_empty() {
+ assert!(!contains_duplicates(&[]))
+ }
+
+ #[test]
+ fn try_lock_works() {
+ let key = ThreadKey::get().unwrap();
+ let collection = BoxedLockCollection::new([Mutex::new(1), Mutex::new(2)]);
+ let guard = collection.try_lock(key);
+
+ std::thread::scope(|s| {
+ s.spawn(|| {
+ let key = ThreadKey::get().unwrap();
+ let guard = collection.try_lock(key);
+ assert!(guard.is_err());
+ });
+ });
+
+ assert!(guard.is_ok());
+ }
+
+ #[test]
+ fn try_read_works() {
+ let key = ThreadKey::get().unwrap();
+ let collection = BoxedLockCollection::new([RwLock::new(1), RwLock::new(2)]);
+ let guard = collection.try_read(key);
+
+ std::thread::scope(|s| {
+ s.spawn(|| {
+ let key = ThreadKey::get().unwrap();
+ let guard = collection.try_read(key);
+ assert!(guard.is_ok());
+ });
+ });
+
+ assert!(guard.is_ok());
+ }
+
+ #[test]
+ fn try_lock_fails_with_one_exclusive_lock() {
+ let key = ThreadKey::get().unwrap();
+ let locks = [Mutex::new(1), Mutex::new(2)];
+ let collection = BoxedLockCollection::new_ref(&locks);
+ let guard = locks[1].try_lock(key);
+
+ std::thread::scope(|s| {
+ s.spawn(|| {
+ let key = ThreadKey::get().unwrap();
+ let guard = collection.try_lock(key);
+ assert!(guard.is_err());
+ });
+ });
+
+ assert!(guard.is_ok());
+ }
+
+ #[test]
+ fn try_read_fails_during_exclusive_lock() {
+ let key = ThreadKey::get().unwrap();
+ let collection = BoxedLockCollection::new([RwLock::new(1), RwLock::new(2)]);
+ let guard = collection.try_lock(key);
+
+ std::thread::scope(|s| {
+ s.spawn(|| {
+ let key = ThreadKey::get().unwrap();
+ let guard = collection.try_read(key);
+ assert!(guard.is_err());
+ });
+ });
+
+ assert!(guard.is_ok());
+ }
+
+ #[test]
+ fn try_read_fails_with_one_exclusive_lock() {
+ let key = ThreadKey::get().unwrap();
+ let locks = [RwLock::new(1), RwLock::new(2)];
+ let collection = BoxedLockCollection::new_ref(&locks);
+ let guard = locks[1].try_write(key);
+
+ std::thread::scope(|s| {
+ s.spawn(|| {
+ let key = ThreadKey::get().unwrap();
+ let guard = collection.try_read(key);
+ assert!(guard.is_err());
+ });
+ });
+
+ assert!(guard.is_ok());
+ }
+
+ #[test]
fn works_in_collection() {
let key = ThreadKey::get().unwrap();
let mutex1 = Mutex::new(0);
diff --git a/src/collection/guard.rs b/src/collection/guard.rs
index 9412343..eea13ed 100644
--- a/src/collection/guard.rs
+++ b/src/collection/guard.rs
@@ -7,6 +7,7 @@ use crate::key::Keyable;
use super::LockGuard;
#[mutants::skip] // it's hard to get two guards safely
+#[cfg(not(tarpaulin_include))]
impl<Guard: PartialEq, Key: Keyable> PartialEq for LockGuard<'_, Guard, Key> {
fn eq(&self, other: &Self) -> bool {
self.guard.eq(&other.guard)
@@ -14,6 +15,7 @@ impl<Guard: PartialEq, Key: Keyable> PartialEq for LockGuard<'_, Guard, Key> {
}
#[mutants::skip] // it's hard to get two guards safely
+#[cfg(not(tarpaulin_include))]
impl<Guard: PartialOrd, Key: Keyable> PartialOrd for LockGuard<'_, Guard, Key> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.guard.partial_cmp(&other.guard)
@@ -21,9 +23,11 @@ impl<Guard: PartialOrd, Key: Keyable> PartialOrd for LockGuard<'_, Guard, Key> {
}
#[mutants::skip] // it's hard to get two guards safely
+#[cfg(not(tarpaulin_include))]
impl<Guard: Eq, Key: Keyable> Eq for LockGuard<'_, Guard, Key> {}
#[mutants::skip] // it's hard to get two guards safely
+#[cfg(not(tarpaulin_include))]
impl<Guard: Ord, Key: Keyable> Ord for LockGuard<'_, Guard, Key> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.guard.cmp(&other.guard)
@@ -31,6 +35,7 @@ impl<Guard: Ord, Key: Keyable> Ord for LockGuard<'_, Guard, Key> {
}
#[mutants::skip] // hashing involves RNG and is hard to test
+#[cfg(not(tarpaulin_include))]
impl<Guard: Hash, Key: Keyable> Hash for LockGuard<'_, Guard, Key> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.guard.hash(state)
@@ -38,6 +43,7 @@ impl<Guard: Hash, Key: Keyable> Hash for LockGuard<'_, Guard, Key> {
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<Guard: Debug, Key: Keyable> Debug for LockGuard<'_, Guard, Key> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Debug::fmt(&**self, f)
@@ -79,7 +85,7 @@ impl<Guard, Key: Keyable> AsMut<Guard> for LockGuard<'_, Guard, Key> {
#[cfg(test)]
mod tests {
use crate::collection::OwnedLockCollection;
- use crate::{RwLock, ThreadKey};
+ use crate::{LockCollection, Mutex, RwLock, ThreadKey};
#[test]
fn guard_display_works() {
@@ -88,4 +94,59 @@ mod tests {
let guard = lock.read(key);
assert_eq!(guard.to_string(), "Hello, world!".to_string());
}
+
+ #[test]
+ fn deref_mut_works() {
+ let mut key = ThreadKey::get().unwrap();
+ let locks = (Mutex::new(1), Mutex::new(2));
+ let lock = LockCollection::new_ref(&locks);
+ let mut guard = lock.lock(&mut key);
+ *guard.0 = 3;
+ drop(guard);
+
+ let guard = locks.0.lock(&mut key);
+ assert_eq!(*guard, 3);
+ drop(guard);
+
+ let guard = locks.1.lock(&mut key);
+ assert_eq!(*guard, 2);
+ drop(guard);
+ }
+
+ #[test]
+ fn as_ref_works() {
+ let mut key = ThreadKey::get().unwrap();
+ let locks = (Mutex::new(1), Mutex::new(2));
+ let lock = LockCollection::new_ref(&locks);
+ let mut guard = lock.lock(&mut key);
+ *guard.0 = 3;
+ drop(guard);
+
+ let guard = locks.0.lock(&mut key);
+ assert_eq!(guard.as_ref(), &3);
+ drop(guard);
+
+ let guard = locks.1.lock(&mut key);
+ assert_eq!(guard.as_ref(), &2);
+ drop(guard);
+ }
+
+ #[test]
+ fn as_mut_works() {
+ let mut key = ThreadKey::get().unwrap();
+ let locks = (Mutex::new(1), Mutex::new(2));
+ let lock = LockCollection::new_ref(&locks);
+ let mut guard = lock.lock(&mut key);
+ let guard_mut = guard.as_mut();
+ *guard_mut.0 = 3;
+ drop(guard);
+
+ let guard = locks.0.lock(&mut key);
+ assert_eq!(guard.as_ref(), &3);
+ drop(guard);
+
+ let guard = locks.1.lock(&mut key);
+ assert_eq!(guard.as_ref(), &2);
+ drop(guard);
+ }
}
diff --git a/src/collection/owned.rs b/src/collection/owned.rs
index a96300d..4a0d1ef 100644
--- a/src/collection/owned.rs
+++ b/src/collection/owned.rs
@@ -8,6 +8,7 @@ use crate::Keyable;
use super::{utils, LockGuard, OwnedLockCollection};
#[mutants::skip] // it's hard to test individual locks in an OwnedLockCollection
+#[cfg(not(tarpaulin_include))]
fn get_locks<L: Lockable>(data: &L) -> Vec<&dyn RawLock> {
let mut locks = Vec::new();
data.get_ptrs(&mut locks);
@@ -16,6 +17,7 @@ fn get_locks<L: Lockable>(data: &L) -> Vec<&dyn RawLock> {
unsafe impl<L: Lockable> RawLock for OwnedLockCollection<L> {
#[mutants::skip] // this should never run
+ #[cfg(not(tarpaulin_include))]
fn poison(&self) {
let locks = get_locks(&self.data);
for lock in locks {
@@ -63,6 +65,7 @@ unsafe impl<L: Lockable> Lockable for OwnedLockCollection<L> {
Self: 'g;
#[mutants::skip] // It's hard to test lkocks in an OwnedLockCollection, because they're owned
+ #[cfg(not(tarpaulin_include))]
fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
self.data.get_ptrs(ptrs)
}
diff --git a/src/collection/ref.rs b/src/collection/ref.rs
index 512bdec..37973f6 100644
--- a/src/collection/ref.rs
+++ b/src/collection/ref.rs
@@ -41,6 +41,7 @@ where
unsafe impl<L: Lockable> RawLock for RefLockCollection<'_, L> {
#[mutants::skip] // this should never run
+ #[cfg(not(tarpaulin_include))]
fn poison(&self) {
for lock in &self.locks {
lock.poison();
@@ -109,6 +110,7 @@ impl<T, L: AsRef<T>> AsRef<T> for RefLockCollection<'_, L> {
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<L: Debug> Debug for RefLockCollection<'_, L> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct(stringify!(RefLockCollection))
diff --git a/src/collection/retry.rs b/src/collection/retry.rs
index fe0a5b8..331b669 100644
--- a/src/collection/retry.rs
+++ b/src/collection/retry.rs
@@ -1,4 +1,4 @@
-use std::cell::RefCell;
+use std::cell::Cell;
use std::collections::HashSet;
use std::marker::PhantomData;
@@ -9,6 +9,7 @@ use crate::lockable::{
};
use crate::Keyable;
+use super::utils::{attempt_to_recover_locks_from_panic, attempt_to_recover_reads_from_panic};
use super::{LockGuard, RetryingLockCollection};
/// Get all raw locks in the collection
@@ -37,6 +38,7 @@ fn contains_duplicates<L: Lockable>(data: L) -> bool {
unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
#[mutants::skip] // this should never run
+ #[cfg(not(tarpaulin_include))]
fn poison(&self) {
let locks = get_locks(&self.data);
for lock in locks {
@@ -45,7 +47,6 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
}
unsafe fn raw_lock(&self) {
- let mut first_index = 0;
let locks = get_locks(&self.data);
if locks.is_empty() {
@@ -54,16 +55,17 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
}
// these will be unlocked in case of a panic
- let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ let first_index = Cell::new(0);
+ let locked = Cell::new(0);
handle_unwind(
|| unsafe {
'outer: loop {
// This prevents us from entering a spin loop waiting for
// the same lock to be unlocked
// safety: we have the thread key
- locks[first_index].raw_lock();
+ locks[first_index.get()].raw_lock();
for (i, lock) in locks.iter().enumerate() {
- if i == first_index {
+ if i == first_index.get() {
// we've already locked this one
continue;
}
@@ -74,23 +76,21 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
// immediately after, causing a panic
// safety: we have the thread key
if lock.raw_try_lock() {
- locked.borrow_mut().push(*lock)
+ locked.set(locked.get() + 1);
} else {
- for lock in locked.borrow().iter() {
- // safety: we already locked all of these
- lock.raw_unlock();
+ // safety: we already locked all of these
+ attempt_to_recover_locks_from_panic(&locks[0..i]);
+ if first_index.get() >= i {
+ // safety: this is already locked and can't be
+ // unlocked by the previous loop
+ locks[first_index.get()].raw_unlock();
}
- // these are no longer locked
- locked.borrow_mut().clear();
- if first_index >= i {
- // safety: this is already locked and can't be unlocked
- // by the previous loop
- locks[first_index].raw_unlock();
- }
+ // nothing is locked anymore
+ locked.set(0);
// call lock on this to prevent a spin loop
- first_index = i;
+ first_index.set(i);
continue 'outer;
}
}
@@ -99,7 +99,12 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
break;
}
},
- || utils::attempt_to_recover_locks_from_panic(&locked),
+ || {
+ utils::attempt_to_recover_locks_from_panic(&locks[0..locked.get()]);
+ if first_index.get() >= locked.get() {
+ locks[first_index.get()].raw_unlock();
+ }
+ },
)
}
@@ -113,25 +118,23 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
}
// these will be unlocked in case of a panic
- let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ let locked = Cell::new(0);
handle_unwind(
|| unsafe {
for (i, lock) in locks.iter().enumerate() {
// safety: we have the thread key
if lock.raw_try_lock() {
- locked.borrow_mut().push(*lock);
+ locked.set(locked.get() + 1);
} else {
- for lock in locks.iter().take(i) {
- // safety: we already locked all of these
- lock.raw_unlock();
- }
+ // safety: we already locked all of these
+ attempt_to_recover_locks_from_panic(&locks[0..i]);
return false;
}
}
true
},
- || utils::attempt_to_recover_locks_from_panic(&locked),
+ || utils::attempt_to_recover_locks_from_panic(&locks[0..locked.get()]),
)
}
@@ -144,7 +147,6 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
}
unsafe fn raw_read(&self) {
- let mut first_index = 0;
let locks = get_locks(&self.data);
if locks.is_empty() {
@@ -152,35 +154,35 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
return;
}
- let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ let locked = Cell::new(0);
+ let first_index = Cell::new(0);
handle_unwind(
|| 'outer: loop {
// safety: we have the thread key
- locks[first_index].raw_read();
+ locks[first_index.get()].raw_read();
for (i, lock) in locks.iter().enumerate() {
- if i == first_index {
+ if i == first_index.get() {
continue;
}
// safety: we have the thread key
if lock.raw_try_read() {
- locked.borrow_mut().push(*lock);
+ locked.set(locked.get() + 1);
} else {
- for lock in locked.borrow().iter() {
- // safety: we already locked all of these
- lock.raw_unlock_read();
- }
- // these are no longer locked
- locked.borrow_mut().clear();
+ // safety: we already locked all of these
+ attempt_to_recover_reads_from_panic(&locks[0..i]);
- if first_index >= i {
+ if first_index.get() >= i {
// safety: this is already locked and can't be unlocked
// by the previous loop
- locks[first_index].raw_unlock_read();
+ locks[first_index.get()].raw_unlock_read();
}
+ // these are no longer locked
+ locked.set(0);
+
// don't go into a spin loop, wait for this one to lock
- first_index = i;
+ first_index.set(i);
continue 'outer;
}
}
@@ -188,7 +190,12 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
// safety: we locked all the data
break;
},
- || utils::attempt_to_recover_reads_from_panic(&locked),
+ || {
+ utils::attempt_to_recover_reads_from_panic(&locks[0..locked.get()]);
+ if first_index.get() >= locked.get() {
+ locks[first_index.get()].raw_unlock_read();
+ }
+ },
)
}
@@ -201,25 +208,23 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
return true;
}
- let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ let locked = Cell::new(0);
handle_unwind(
|| unsafe {
for (i, lock) in locks.iter().enumerate() {
// safety: we have the thread key
if lock.raw_try_read() {
- locked.borrow_mut().push(*lock);
+ locked.set(locked.get() + 1);
} else {
- for lock in locks.iter().take(i) {
- // safety: we already locked all of these
- lock.raw_unlock_read();
- }
+ // safety: we already locked all of these
+ attempt_to_recover_reads_from_panic(&locks[0..i]);
return false;
}
}
true
},
- || utils::attempt_to_recover_reads_from_panic(&locked),
+ || utils::attempt_to_recover_reads_from_panic(&locks[0..locked.get()]),
)
}
@@ -901,4 +906,16 @@ mod tests {
assert_eq!(collection.into_inner().len(), 2);
}
+
+ #[test]
+ fn lock_empty_lock_collection() {
+ let mut key = ThreadKey::get().unwrap();
+ let collection: RetryingLockCollection<[RwLock<i32>; 0]> = RetryingLockCollection::new([]);
+
+ let guard = collection.lock(&mut key);
+ assert!(guard.len() == 0);
+
+ let guard = collection.read(&mut key);
+ assert!(guard.len() == 0);
+ }
}
diff --git a/src/collection/utils.rs b/src/collection/utils.rs
index d368773..7f29037 100644
--- a/src/collection/utils.rs
+++ b/src/collection/utils.rs
@@ -1,4 +1,4 @@
-use std::cell::RefCell;
+use std::cell::Cell;
use crate::handle_unwind::handle_unwind;
use crate::lockable::RawLock;
@@ -6,31 +6,31 @@ use crate::lockable::RawLock;
/// Lock a set of locks in the given order. It's UB to call this without a `ThreadKey`
pub unsafe fn ordered_lock(locks: &[&dyn RawLock]) {
// these will be unlocked in case of a panic
- let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ let locked = Cell::new(0);
handle_unwind(
|| {
for lock in locks {
lock.raw_lock();
- locked.borrow_mut().push(*lock);
+ locked.set(locked.get() + 1);
}
},
- || attempt_to_recover_locks_from_panic(&locked),
+ || attempt_to_recover_locks_from_panic(&locks[0..locked.get()]),
)
}
/// Lock a set of locks in the given order. It's UB to call this without a `ThreadKey`
pub unsafe fn ordered_read(locks: &[&dyn RawLock]) {
- let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ let locked = Cell::new(0);
handle_unwind(
|| {
for lock in locks {
lock.raw_read();
- locked.borrow_mut().push(*lock);
+ locked.set(locked.get() + 1);
}
},
- || attempt_to_recover_reads_from_panic(&locked),
+ || attempt_to_recover_reads_from_panic(&locks[0..locked.get()]),
)
}
@@ -38,14 +38,14 @@ pub unsafe fn ordered_read(locks: &[&dyn RawLock]) {
/// locks contain duplicates, or if this is called by multiple threads with the
/// locks in different orders.
pub unsafe fn ordered_try_lock(locks: &[&dyn RawLock]) -> bool {
- let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ let locked = Cell::new(0);
handle_unwind(
|| unsafe {
for (i, lock) in locks.iter().enumerate() {
// safety: we have the thread key
if lock.raw_try_lock() {
- locked.borrow_mut().push(*lock);
+ locked.set(locked.get() + 1);
} else {
for lock in &locks[0..i] {
// safety: this lock was already acquired
@@ -59,7 +59,7 @@ pub unsafe fn ordered_try_lock(locks: &[&dyn RawLock]) -> bool {
},
||
// safety: everything in locked is locked
- attempt_to_recover_locks_from_panic(&locked),
+ attempt_to_recover_locks_from_panic(&locks[0..locked.get()]),
)
}
@@ -67,14 +67,14 @@ pub unsafe fn ordered_try_lock(locks: &[&dyn RawLock]) -> bool {
/// is called by multiple threads with the locks in different orders.
pub unsafe fn ordered_try_read(locks: &[&dyn RawLock]) -> bool {
// these will be unlocked in case of a panic
- let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ let locked = Cell::new(0);
handle_unwind(
|| unsafe {
for (i, lock) in locks.iter().enumerate() {
// safety: we have the thread key
if lock.raw_try_read() {
- locked.borrow_mut().push(*lock);
+ locked.set(locked.get() + 1);
} else {
for lock in &locks[0..i] {
// safety: this lock was already acquired
@@ -88,34 +88,30 @@ pub unsafe fn ordered_try_read(locks: &[&dyn RawLock]) -> bool {
},
||
// safety: everything in locked is locked
- attempt_to_recover_reads_from_panic(&locked),
+ attempt_to_recover_reads_from_panic(&locks[0..locked.get()]),
)
}
/// Unlocks the already locked locks in order to recover from a panic
-pub unsafe fn attempt_to_recover_locks_from_panic(locked: &RefCell<Vec<&dyn RawLock>>) {
+pub unsafe fn attempt_to_recover_locks_from_panic(locks: &[&dyn RawLock]) {
handle_unwind(
|| {
- let mut locked = locked.borrow_mut();
- while let Some(locked_lock) = locked.pop() {
- locked_lock.raw_unlock();
- }
+ // safety: the caller assumes that these are already locked
+ locks.iter().for_each(|lock| lock.raw_unlock());
},
// if we get another panic in here, we'll just have to poison what remains
- || locked.borrow().iter().for_each(|l| l.poison()),
+ || locks.iter().for_each(|l| l.poison()),
)
}
/// Unlocks the already locked locks in order to recover from a panic
-pub unsafe fn attempt_to_recover_reads_from_panic(locked: &RefCell<Vec<&dyn RawLock>>) {
+pub unsafe fn attempt_to_recover_reads_from_panic(locked: &[&dyn RawLock]) {
handle_unwind(
|| {
- let mut locked = locked.borrow_mut();
- while let Some(locked_lock) = locked.pop() {
- locked_lock.raw_unlock_read();
- }
+ // safety: the caller assumes these are already locked
+ locked.iter().for_each(|lock| lock.raw_unlock_read());
},
// if we get another panic in here, we'll just have to poison what remains
- || locked.borrow().iter().for_each(|l| l.poison()),
+ || locked.iter().for_each(|l| l.poison()),
)
}
diff --git a/src/key.rs b/src/key.rs
index c7369be..4cd145d 100644
--- a/src/key.rs
+++ b/src/key.rs
@@ -1,7 +1,6 @@
-use std::cell::Cell;
+use std::cell::{Cell, LazyCell};
use std::fmt::{self, Debug};
use std::marker::PhantomData;
-use std::sync::LazyLock;
use sealed::Sealed;
@@ -16,7 +15,7 @@ mod sealed {
}
thread_local! {
- static KEY: LazyLock<KeyCell> = LazyLock::new(KeyCell::default);
+ static KEY: LazyCell<KeyCell> = LazyCell::new(KeyCell::default);
}
/// The key for the current thread.
@@ -44,6 +43,7 @@ unsafe impl Keyable for &mut ThreadKey {}
unsafe impl Sync for ThreadKey {}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl Debug for ThreadKey {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "ThreadKey")
diff --git a/src/lockable.rs b/src/lockable.rs
index 70d442a..4f7cfe5 100644
--- a/src/lockable.rs
+++ b/src/lockable.rs
@@ -276,10 +276,8 @@ macro_rules! tuple_impls {
unsafe impl<$($generic: Lockable,)*> Lockable for ($($generic,)*) {
type Guard<'g> = ($($generic::Guard<'g>,)*) where Self: 'g;
-
-
fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
- self.0.get_ptrs(ptrs);
+ $(self.$value.get_ptrs(ptrs));*
}
unsafe fn guard(&self) -> Self::Guard<'_> {
diff --git a/src/mutex.rs b/src/mutex.rs
index 99d0981..d6cba7d 100644
--- a/src/mutex.rs
+++ b/src/mutex.rs
@@ -214,6 +214,36 @@ mod tests {
}
#[test]
+ fn ref_as_mut() {
+ let mut key = ThreadKey::get().unwrap();
+ let collection = LockCollection::new(crate::Mutex::new(0));
+ let mut guard = collection.lock(&mut key);
+ let guard_mut = guard.as_mut().as_mut();
+
+ *guard_mut = 3;
+ drop(guard);
+
+ let guard = collection.lock(&mut key);
+
+ assert_eq!(guard.as_ref().as_ref(), &3);
+ }
+
+ #[test]
+ fn guard_as_mut() {
+ let mut key = ThreadKey::get().unwrap();
+ let mutex = crate::Mutex::new(0);
+ let mut guard = mutex.lock(&mut key);
+ let guard_mut = guard.as_mut();
+
+ *guard_mut = 3;
+ drop(guard);
+
+ let guard = mutex.lock(&mut key);
+
+ assert_eq!(guard.as_ref(), &3);
+ }
+
+ #[test]
fn dropping_guard_releases_mutex() {
let mut key = ThreadKey::get().unwrap();
let mutex: crate::Mutex<_> = Mutex::new("Hello, world!");
diff --git a/src/mutex/guard.rs b/src/mutex/guard.rs
index f7a01a4..4e4d5f1 100644
--- a/src/mutex/guard.rs
+++ b/src/mutex/guard.rs
@@ -34,6 +34,7 @@ impl<T: Ord + ?Sized, R: RawMutex> Ord for MutexRef<'_, T, R> {
}
#[mutants::skip] // hashing involves RNG and is hard to test
+#[cfg(not(tarpaulin_include))]
impl<T: Hash + ?Sized, R: RawMutex> Hash for MutexRef<'_, T, R> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.deref().hash(state)
@@ -41,6 +42,7 @@ impl<T: Hash + ?Sized, R: RawMutex> Hash for MutexRef<'_, T, R> {
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<T: Debug + ?Sized, R: RawMutex> Debug for MutexRef<'_, T, R> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Debug::fmt(&**self, f)
@@ -106,6 +108,7 @@ impl<'a, T: ?Sized, R: RawMutex> MutexRef<'a, T, R> {
// there's nothing i can do about that
#[mutants::skip] // it's hard to get two guards safely
+#[cfg(not(tarpaulin_include))]
impl<T: PartialEq + ?Sized, R: RawMutex, Key: Keyable> PartialEq for MutexGuard<'_, '_, T, Key, R> {
fn eq(&self, other: &Self) -> bool {
self.deref().eq(&**other)
@@ -113,9 +116,11 @@ impl<T: PartialEq + ?Sized, R: RawMutex, Key: Keyable> PartialEq for MutexGuard<
}
#[mutants::skip] // it's hard to get two guards safely
+#[cfg(not(tarpaulin_include))]
impl<T: Eq + ?Sized, R: RawMutex, Key: Keyable> Eq for MutexGuard<'_, '_, T, Key, R> {}
#[mutants::skip] // it's hard to get two guards safely
+#[cfg(not(tarpaulin_include))]
impl<T: PartialOrd + ?Sized, R: RawMutex, Key: Keyable> PartialOrd
for MutexGuard<'_, '_, T, Key, R>
{
@@ -125,6 +130,7 @@ impl<T: PartialOrd + ?Sized, R: RawMutex, Key: Keyable> PartialOrd
}
#[mutants::skip] // it's hard to get two guards safely
+#[cfg(not(tarpaulin_include))]
impl<T: Ord + ?Sized, R: RawMutex, Key: Keyable> Ord for MutexGuard<'_, '_, T, Key, R> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.deref().cmp(&**other)
@@ -132,6 +138,7 @@ impl<T: Ord + ?Sized, R: RawMutex, Key: Keyable> Ord for MutexGuard<'_, '_, T, K
}
#[mutants::skip] // hashing involves RNG and is hard to test
+#[cfg(not(tarpaulin_include))]
impl<T: Hash + ?Sized, R: RawMutex, Key: Keyable> Hash for MutexGuard<'_, '_, T, Key, R> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.deref().hash(state)
@@ -139,6 +146,7 @@ impl<T: Hash + ?Sized, R: RawMutex, Key: Keyable> Hash for MutexGuard<'_, '_, T,
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<T: Debug + ?Sized, Key: Keyable, R: RawMutex> Debug for MutexGuard<'_, '_, T, Key, R> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Debug::fmt(&**self, f)
diff --git a/src/mutex/mutex.rs b/src/mutex/mutex.rs
index 5b838a2..0bd5286 100644
--- a/src/mutex/mutex.rs
+++ b/src/mutex/mutex.rs
@@ -130,6 +130,7 @@ impl<T, R: RawMutex> Mutex<T, R> {
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<T: ?Sized + Debug, R: RawMutex> Debug for Mutex<T, R> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// safety: this is just a try lock, and the value is dropped
diff --git a/src/poisonable/error.rs b/src/poisonable/error.rs
index 9721ce4..bff011d 100644
--- a/src/poisonable/error.rs
+++ b/src/poisonable/error.rs
@@ -4,6 +4,7 @@ use std::error::Error;
use super::{PoisonError, PoisonGuard, TryLockPoisonableError};
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<Guard> fmt::Debug for PoisonError<Guard> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("PoisonError").finish_non_exhaustive()
@@ -12,6 +13,7 @@ impl<Guard> fmt::Debug for PoisonError<Guard> {
impl<Guard> fmt::Display for PoisonError<Guard> {
#[cfg_attr(test, mutants::skip)]
+ #[cfg(not(tarpaulin_include))]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
"poisoned lock: another task failed inside".fmt(f)
}
@@ -151,6 +153,7 @@ impl<Guard> PoisonError<Guard> {
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<G, Key> fmt::Debug for TryLockPoisonableError<'_, '_, G, Key> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
@@ -162,6 +165,7 @@ impl<G, Key> fmt::Debug for TryLockPoisonableError<'_, '_, G, Key> {
impl<G, Key> fmt::Display for TryLockPoisonableError<'_, '_, G, Key> {
#[cfg_attr(test, mutants::skip)]
+ #[cfg(not(tarpaulin_include))]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Self::Poisoned(..) => "poisoned lock: another task failed inside",
diff --git a/src/poisonable/flag.rs b/src/poisonable/flag.rs
index 6b567c8..9186bbc 100644
--- a/src/poisonable/flag.rs
+++ b/src/poisonable/flag.rs
@@ -29,6 +29,7 @@ impl PoisonFlag {
}
#[mutants::skip] // None of the tests have panic = "abort", so this can't be tested
+ #[cfg(not(tarpaulin_include))]
pub fn is_poisoned(&self) -> bool {
false
}
diff --git a/src/poisonable/guard.rs b/src/poisonable/guard.rs
index 36566f5..3f85d25 100644
--- a/src/poisonable/guard.rs
+++ b/src/poisonable/guard.rs
@@ -49,6 +49,7 @@ impl<Guard: Ord> Ord for PoisonRef<'_, Guard> {
}
#[mutants::skip] // hashing involves RNG and is hard to test
+#[cfg(not(tarpaulin_include))]
impl<Guard: Hash> Hash for PoisonRef<'_, Guard> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.guard.hash(state)
@@ -56,6 +57,7 @@ impl<Guard: Hash> Hash for PoisonRef<'_, Guard> {
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<Guard: Debug> Debug for PoisonRef<'_, Guard> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Debug::fmt(&**self, f)
@@ -95,6 +97,7 @@ impl<Guard> AsMut<Guard> for PoisonRef<'_, Guard> {
}
#[mutants::skip] // it's hard to get two guards safely
+#[cfg(not(tarpaulin_include))]
impl<Guard: PartialEq, Key: Keyable> PartialEq for PoisonGuard<'_, '_, Guard, Key> {
fn eq(&self, other: &Self) -> bool {
self.guard.eq(&other.guard)
@@ -102,6 +105,7 @@ impl<Guard: PartialEq, Key: Keyable> PartialEq for PoisonGuard<'_, '_, Guard, Ke
}
#[mutants::skip] // it's hard to get two guards safely
+#[cfg(not(tarpaulin_include))]
impl<Guard: PartialOrd, Key: Keyable> PartialOrd for PoisonGuard<'_, '_, Guard, Key> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.guard.partial_cmp(&other.guard)
@@ -109,9 +113,11 @@ impl<Guard: PartialOrd, Key: Keyable> PartialOrd for PoisonGuard<'_, '_, Guard,
}
#[mutants::skip] // it's hard to get two guards safely
+#[cfg(not(tarpaulin_include))]
impl<Guard: Eq, Key: Keyable> Eq for PoisonGuard<'_, '_, Guard, Key> {}
#[mutants::skip] // it's hard to get two guards safely
+#[cfg(not(tarpaulin_include))]
impl<Guard: Ord, Key: Keyable> Ord for PoisonGuard<'_, '_, Guard, Key> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.guard.cmp(&other.guard)
@@ -119,6 +125,7 @@ impl<Guard: Ord, Key: Keyable> Ord for PoisonGuard<'_, '_, Guard, Key> {
}
#[mutants::skip] // hashing involves RNG and is hard to test
+#[cfg(not(tarpaulin_include))]
impl<Guard: Hash, Key: Keyable> Hash for PoisonGuard<'_, '_, Guard, Key> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.guard.hash(state)
@@ -126,6 +133,7 @@ impl<Guard: Hash, Key: Keyable> Hash for PoisonGuard<'_, '_, Guard, Key> {
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<Guard: Debug, Key: Keyable> Debug for PoisonGuard<'_, '_, Guard, Key> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Debug::fmt(&self.guard, f)
diff --git a/src/poisonable/poisonable.rs b/src/poisonable/poisonable.rs
index 0bc2b03..2dac4bb 100644
--- a/src/poisonable/poisonable.rs
+++ b/src/poisonable/poisonable.rs
@@ -13,6 +13,7 @@ use super::{
unsafe impl<L: Lockable + RawLock> RawLock for Poisonable<L> {
#[mutants::skip] // this should never run
+ #[cfg(not(tarpaulin_include))]
fn poison(&self) {
self.inner.poison()
}
diff --git a/src/rwlock/read_guard.rs b/src/rwlock/read_guard.rs
index 2195e44..bd22837 100644
--- a/src/rwlock/read_guard.rs
+++ b/src/rwlock/read_guard.rs
@@ -34,6 +34,7 @@ impl<T: Ord + ?Sized, R: RawRwLock> Ord for RwLockReadRef<'_, T, R> {
}
#[mutants::skip] // hashing involves PRNG and is hard to test
+#[cfg(not(tarpaulin_include))]
impl<T: Hash + ?Sized, R: RawRwLock> Hash for RwLockReadRef<'_, T, R> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.deref().hash(state)
@@ -41,6 +42,7 @@ impl<T: Hash + ?Sized, R: RawRwLock> Hash for RwLockReadRef<'_, T, R> {
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<T: Debug + ?Sized, R: RawRwLock> Debug for RwLockReadRef<'_, T, R> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Debug::fmt(&**self, f)
@@ -88,6 +90,7 @@ impl<'a, T: ?Sized, R: RawRwLock> RwLockReadRef<'a, T, R> {
}
#[mutants::skip] // it's hard to get two read guards safely
+#[cfg(not(tarpaulin_include))]
impl<T: PartialEq + ?Sized, R: RawRwLock, Key: Keyable> PartialEq
for RwLockReadGuard<'_, '_, T, Key, R>
{
@@ -97,9 +100,11 @@ impl<T: PartialEq + ?Sized, R: RawRwLock, Key: Keyable> PartialEq
}
#[mutants::skip] // it's hard to get two read guards safely
+#[cfg(not(tarpaulin_include))]
impl<T: Eq + ?Sized, R: RawRwLock, Key: Keyable> Eq for RwLockReadGuard<'_, '_, T, Key, R> {}
#[mutants::skip] // it's hard to get two read guards safely
+#[cfg(not(tarpaulin_include))]
impl<T: PartialOrd + ?Sized, R: RawRwLock, Key: Keyable> PartialOrd
for RwLockReadGuard<'_, '_, T, Key, R>
{
@@ -109,6 +114,7 @@ impl<T: PartialOrd + ?Sized, R: RawRwLock, Key: Keyable> PartialOrd
}
#[mutants::skip] // it's hard to get two read guards safely
+#[cfg(not(tarpaulin_include))]
impl<T: Ord + ?Sized, R: RawRwLock, Key: Keyable> Ord for RwLockReadGuard<'_, '_, T, Key, R> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.deref().cmp(&**other)
@@ -116,6 +122,7 @@ impl<T: Ord + ?Sized, R: RawRwLock, Key: Keyable> Ord for RwLockReadGuard<'_, '_
}
#[mutants::skip] // hashing involves PRNG and is hard to test
+#[cfg(not(tarpaulin_include))]
impl<T: Hash + ?Sized, R: RawRwLock, Key: Keyable> Hash for RwLockReadGuard<'_, '_, T, Key, R> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.deref().hash(state)
@@ -123,6 +130,7 @@ impl<T: Hash + ?Sized, R: RawRwLock, Key: Keyable> Hash for RwLockReadGuard<'_,
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<T: Debug + ?Sized, Key: Keyable, R: RawRwLock> Debug for RwLockReadGuard<'_, '_, T, Key, R> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Debug::fmt(&**self, f)
diff --git a/src/rwlock/read_lock.rs b/src/rwlock/read_lock.rs
index 5ac0bbb..5dd83a7 100644
--- a/src/rwlock/read_lock.rs
+++ b/src/rwlock/read_lock.rs
@@ -34,6 +34,7 @@ unsafe impl<T: Send, R: RawRwLock + Send + Sync> Sharable for ReadLock<'_, T, R>
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<T: ?Sized + Debug, R: RawRwLock> Debug for ReadLock<'_, T, R> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// safety: this is just a try lock, and the value is dropped
@@ -108,7 +109,7 @@ impl<T: ?Sized, R: RawRwLock> ReadLock<'_, T, R> {
/// use happylock::rwlock::ReadLock;
///
/// let key = ThreadKey::get().unwrap();
- /// let lock: &'static mut RwLock<_> = Box::leak(Box::new(RwLock::new(1)));
+ /// let lock: RwLock<_> = RwLock::new(1);
/// let reader = ReadLock::new(&lock);
///
/// let n = reader.lock(key);
diff --git a/src/rwlock/rwlock.rs b/src/rwlock/rwlock.rs
index 7a105d7..038e6c7 100644
--- a/src/rwlock/rwlock.rs
+++ b/src/rwlock/rwlock.rs
@@ -141,6 +141,7 @@ impl<T, R: RawRwLock> RwLock<T, R> {
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<T: ?Sized + Debug, R: RawRwLock> Debug for RwLock<T, R> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// safety: this is just a try lock, and the value is dropped
diff --git a/src/rwlock/write_guard.rs b/src/rwlock/write_guard.rs
index ff559b8..c971260 100644
--- a/src/rwlock/write_guard.rs
+++ b/src/rwlock/write_guard.rs
@@ -34,6 +34,7 @@ impl<T: Ord + ?Sized, R: RawRwLock> Ord for RwLockWriteRef<'_, T, R> {
}
#[mutants::skip] // hashing involves PRNG and is difficult to test
+#[cfg(not(tarpaulin_include))]
impl<T: Hash + ?Sized, R: RawRwLock> Hash for RwLockWriteRef<'_, T, R> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.deref().hash(state)
@@ -41,6 +42,7 @@ impl<T: Hash + ?Sized, R: RawRwLock> Hash for RwLockWriteRef<'_, T, R> {
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<T: Debug + ?Sized, R: RawRwLock> Debug for RwLockWriteRef<'_, T, R> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Debug::fmt(&**self, f)
@@ -103,6 +105,7 @@ impl<'a, T: ?Sized + 'a, R: RawRwLock> RwLockWriteRef<'a, T, R> {
}
#[mutants::skip] // it's hard to get two read guards safely
+#[cfg(not(tarpaulin_include))]
impl<T: PartialEq + ?Sized, R: RawRwLock, Key: Keyable> PartialEq
for RwLockWriteGuard<'_, '_, T, Key, R>
{
@@ -112,9 +115,11 @@ impl<T: PartialEq + ?Sized, R: RawRwLock, Key: Keyable> PartialEq
}
#[mutants::skip] // it's hard to get two read guards safely
+#[cfg(not(tarpaulin_include))]
impl<T: Eq + ?Sized, R: RawRwLock, Key: Keyable> Eq for RwLockWriteGuard<'_, '_, T, Key, R> {}
#[mutants::skip] // it's hard to get two read guards safely
+#[cfg(not(tarpaulin_include))]
impl<T: PartialOrd + ?Sized, R: RawRwLock, Key: Keyable> PartialOrd
for RwLockWriteGuard<'_, '_, T, Key, R>
{
@@ -124,6 +129,7 @@ impl<T: PartialOrd + ?Sized, R: RawRwLock, Key: Keyable> PartialOrd
}
#[mutants::skip] // it's hard to get two read guards safely
+#[cfg(not(tarpaulin_include))]
impl<T: Ord + ?Sized, R: RawRwLock, Key: Keyable> Ord for RwLockWriteGuard<'_, '_, T, Key, R> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.deref().cmp(&**other)
@@ -131,6 +137,7 @@ impl<T: Ord + ?Sized, R: RawRwLock, Key: Keyable> Ord for RwLockWriteGuard<'_, '
}
#[mutants::skip] // hashing involves PRNG and is difficult to test
+#[cfg(not(tarpaulin_include))]
impl<T: Hash + ?Sized, R: RawRwLock, Key: Keyable> Hash for RwLockWriteGuard<'_, '_, T, Key, R> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.deref().hash(state)
@@ -138,6 +145,7 @@ impl<T: Hash + ?Sized, R: RawRwLock, Key: Keyable> Hash for RwLockWriteGuard<'_,
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<T: Debug + ?Sized, Key: Keyable, R: RawRwLock> Debug for RwLockWriteGuard<'_, '_, T, Key, R> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Debug::fmt(&**self, f)
diff --git a/src/rwlock/write_lock.rs b/src/rwlock/write_lock.rs
index 443fbcd..cc96953 100644
--- a/src/rwlock/write_lock.rs
+++ b/src/rwlock/write_lock.rs
@@ -26,6 +26,7 @@ unsafe impl<T: Send, R: RawRwLock + Send + Sync> Lockable for WriteLock<'_, T, R
// no way to express that. I don't think I want to ever express that.
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<T: ?Sized + Debug, R: RawRwLock> Debug for WriteLock<'_, T, R> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// safety: this is just a try lock, and the value is dropped
diff --git a/tests/evil_mutex.rs b/tests/evil_mutex.rs
index 88bcbf6..361fe4c 100644
--- a/tests/evil_mutex.rs
+++ b/tests/evil_mutex.rs
@@ -1,6 +1,6 @@
use std::sync::Arc;
-use happylock::collection::BoxedLockCollection;
+use happylock::collection::{BoxedLockCollection, RetryingLockCollection};
use happylock::mutex::Mutex;
use happylock::ThreadKey;
use lock_api::{GuardNoSend, RawMutex};
@@ -52,3 +52,27 @@ fn boxed_mutexes() {
assert!(evil_mutex.try_lock(&mut key).is_err());
assert!(useless_mutex.try_lock(&mut key).is_ok());
}
+
+#[test]
+fn retrying_mutexes() {
+ let mut key = ThreadKey::get().unwrap();
+ let good_mutex: Arc<Mutex<i32, parking_lot::RawMutex>> = Arc::new(Mutex::new(5));
+ let evil_mutex: Arc<Mutex<i32, EvilMutex>> = Arc::new(Mutex::new(7));
+ let useless_mutex: Arc<Mutex<i32, parking_lot::RawMutex>> = Arc::new(Mutex::new(10));
+ let c_good = Arc::clone(&good_mutex);
+ let c_evil = Arc::clone(&evil_mutex);
+ let c_useless = Arc::clone(&useless_mutex);
+
+ let r = std::thread::spawn(move || {
+ let mut key = ThreadKey::get().unwrap();
+ let collection =
+ RetryingLockCollection::try_new((&*c_good, &*c_evil, &*c_useless)).unwrap();
+ collection.lock(&mut key);
+ })
+ .join();
+
+ assert!(r.is_err());
+ assert!(good_mutex.try_lock(&mut key).is_ok());
+ assert!(evil_mutex.try_lock(&mut key).is_err());
+ assert!(useless_mutex.try_lock(&mut key).is_ok());
+}
diff --git a/tests/evil_rwlock.rs b/tests/evil_rwlock.rs
index a1cf0b3..234847c 100644
--- a/tests/evil_rwlock.rs
+++ b/tests/evil_rwlock.rs
@@ -1,6 +1,6 @@
use std::sync::Arc;
-use happylock::collection::BoxedLockCollection;
+use happylock::collection::{BoxedLockCollection, RetryingLockCollection};
use happylock::rwlock::RwLock;
use happylock::ThreadKey;
use lock_api::{GuardNoSend, RawRwLock};
@@ -64,3 +64,27 @@ fn boxed_rwlocks() {
assert!(evil_mutex.try_write(&mut key).is_err());
assert!(useless_mutex.try_write(&mut key).is_ok());
}
+
+#[test]
+fn retrying_rwlocks() {
+ let mut key = ThreadKey::get().unwrap();
+ let good_mutex: Arc<RwLock<i32, parking_lot::RawRwLock>> = Arc::new(RwLock::new(5));
+ let evil_mutex: Arc<RwLock<i32, EvilRwLock>> = Arc::new(RwLock::new(7));
+ let useless_mutex: Arc<RwLock<i32, parking_lot::RawRwLock>> = Arc::new(RwLock::new(10));
+ let c_good = Arc::clone(&good_mutex);
+ let c_evil = Arc::clone(&evil_mutex);
+ let c_useless = Arc::clone(&useless_mutex);
+
+ let r = std::thread::spawn(move || {
+ let mut key = ThreadKey::get().unwrap();
+ let collection =
+ RetryingLockCollection::try_new((&*c_good, &*c_evil, &*c_useless)).unwrap();
+ collection.lock(&mut key);
+ })
+ .join();
+
+ assert!(r.is_err());
+ assert!(good_mutex.try_write(&mut key).is_ok());
+ assert!(evil_mutex.try_write(&mut key).is_err());
+ assert!(useless_mutex.try_write(&mut key).is_ok());
+}