summaryrefslogtreecommitdiff
path: root/src/collection
diff options
context:
space:
mode:
Diffstat (limited to 'src/collection')
-rw-r--r--src/collection/boxed.rs102
-rw-r--r--src/collection/guard.rs63
-rw-r--r--src/collection/owned.rs3
-rw-r--r--src/collection/ref.rs2
-rw-r--r--src/collection/retry.rs111
-rw-r--r--src/collection/utils.rs46
6 files changed, 251 insertions, 76 deletions
diff --git a/src/collection/boxed.rs b/src/collection/boxed.rs
index 98d7632..72489bf 100644
--- a/src/collection/boxed.rs
+++ b/src/collection/boxed.rs
@@ -22,6 +22,7 @@ fn contains_duplicates(l: &[&dyn RawLock]) -> bool {
unsafe impl<L: Lockable> RawLock for BoxedLockCollection<L> {
#[mutants::skip] // this should never be called
+ #[cfg(not(tarpaulin_include))]
fn poison(&self) {
for lock in &self.locks {
lock.poison();
@@ -33,6 +34,6 @@ unsafe impl<L: Lockable> RawLock for BoxedLockCollection<L> {
}
unsafe fn raw_try_lock(&self) -> bool {
utils::ordered_try_lock(self.locks())
}
@@ -136,6 +138,7 @@ unsafe impl<L: Sync> Sync for BoxedLockCollection<L> {}
impl<L> Drop for BoxedLockCollection<L> {
#[mutants::skip] // i can't test for a memory leak
+ #[cfg(not(tarpaulin_include))]
fn drop(&mut self) {
unsafe {
// safety: this collection will never be locked again
@@ -155,6 +158,7 @@ impl<T, L: AsRef<T>> AsRef<T> for BoxedLockCollection<L> {
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<L: Debug> Debug for BoxedLockCollection<L> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct(stringify!(BoxedLockCollection))
@@ -197,8 +201,8 @@ impl<L> BoxedLockCollection<L> {
#[must_use]
pub fn into_child(mut self) -> L {
unsafe {
- // safety: this collection will never be locked again
- self.locks.clear();
+ // safety: this collection will never be used again
+ std::ptr::drop_in_place(&mut self.locks);
// safety: this was allocated using a box, and is now unique
let boxed: Box<UnsafeCell<L>> = Box::from_raw(self.data.cast_mut());
// to prevent a double free
@@ -621,7 +625,7 @@ where
#[cfg(test)]
mod tests {
use super::*;
- use crate::{Mutex, ThreadKey};
+ use crate::{Mutex, RwLock, ThreadKey};
#[test]
fn non_duplicates_allowed() {
@@ -637,6 +641,98 @@ mod tests {
}
#[test]
+ fn contains_duplicates_empty() {
+ assert!(!contains_duplicates(&[]))
+ }
+
+ #[test]
+ fn try_lock_works() {
+ let key = ThreadKey::get().unwrap();
+ let collection = BoxedLockCollection::new([Mutex::new(1), Mutex::new(2)]);
+ let guard = collection.try_lock(key);
+
+ std::thread::scope(|s| {
+ s.spawn(|| {
+ let key = ThreadKey::get().unwrap();
+ let guard = collection.try_lock(key);
+ assert!(guard.is_err());
+ });
+ });
+
+ assert!(guard.is_ok());
+ }
+
+ #[test]
+ fn try_read_works() {
+ let key = ThreadKey::get().unwrap();
+ let collection = BoxedLockCollection::new([RwLock::new(1), RwLock::new(2)]);
+ let guard = collection.try_read(key);
+
+ std::thread::scope(|s| {
+ s.spawn(|| {
+ let key = ThreadKey::get().unwrap();
+ let guard = collection.try_read(key);
+ assert!(guard.is_ok());
+ });
+ });
+
+ assert!(guard.is_ok());
+ }
+
+ #[test]
+ fn try_lock_fails_with_one_exclusive_lock() {
+ let key = ThreadKey::get().unwrap();
+ let locks = [Mutex::new(1), Mutex::new(2)];
+ let collection = BoxedLockCollection::new_ref(&locks);
+ let guard = locks[1].try_lock(key);
+
+ std::thread::scope(|s| {
+ s.spawn(|| {
+ let key = ThreadKey::get().unwrap();
+ let guard = collection.try_lock(key);
+ assert!(guard.is_err());
+ });
+ });
+
+ assert!(guard.is_ok());
+ }
+
+ #[test]
+ fn try_read_fails_during_exclusive_lock() {
+ let key = ThreadKey::get().unwrap();
+ let collection = BoxedLockCollection::new([RwLock::new(1), RwLock::new(2)]);
+ let guard = collection.try_lock(key);
+
+ std::thread::scope(|s| {
+ s.spawn(|| {
+ let key = ThreadKey::get().unwrap();
+ let guard = collection.try_read(key);
+ assert!(guard.is_err());
+ });
+ });
+
+ assert!(guard.is_ok());
+ }
+
+ #[test]
+ fn try_read_fails_with_one_exclusive_lock() {
+ let key = ThreadKey::get().unwrap();
+ let locks = [RwLock::new(1), RwLock::new(2)];
+ let collection = BoxedLockCollection::new_ref(&locks);
+ let guard = locks[1].try_write(key);
+
+ std::thread::scope(|s| {
+ s.spawn(|| {
+ let key = ThreadKey::get().unwrap();
+ let guard = collection.try_read(key);
+ assert!(guard.is_err());
+ });
+ });
+
+ assert!(guard.is_ok());
+ }
+
+ #[test]
fn works_in_collection() {
let key = ThreadKey::get().unwrap();
let mutex1 = Mutex::new(0);
diff --git a/src/collection/guard.rs b/src/collection/guard.rs
index 9412343..eea13ed 100644
--- a/src/collection/guard.rs
+++ b/src/collection/guard.rs
@@ -7,6 +7,7 @@ use crate::key::Keyable;
use super::LockGuard;
#[mutants::skip] // it's hard to get two guards safely
+#[cfg(not(tarpaulin_include))]
impl<Guard: PartialEq, Key: Keyable> PartialEq for LockGuard<'_, Guard, Key> {
fn eq(&self, other: &Self) -> bool {
self.guard.eq(&other.guard)
@@ -14,6 +15,7 @@ impl<Guard: PartialEq, Key: Keyable> PartialEq for LockGuard<'_, Guard, Key> {
}
#[mutants::skip] // it's hard to get two guards safely
+#[cfg(not(tarpaulin_include))]
impl<Guard: PartialOrd, Key: Keyable> PartialOrd for LockGuard<'_, Guard, Key> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.guard.partial_cmp(&other.guard)
@@ -21,9 +23,11 @@ impl<Guard: PartialOrd, Key: Keyable> PartialOrd for LockGuard<'_, Guard, Key> {
}
#[mutants::skip] // it's hard to get two guards safely
+#[cfg(not(tarpaulin_include))]
impl<Guard: Eq, Key: Keyable> Eq for LockGuard<'_, Guard, Key> {}
#[mutants::skip] // it's hard to get two guards safely
+#[cfg(not(tarpaulin_include))]
impl<Guard: Ord, Key: Keyable> Ord for LockGuard<'_, Guard, Key> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.guard.cmp(&other.guard)
@@ -31,6 +35,7 @@ impl<Guard: Ord, Key: Keyable> Ord for LockGuard<'_, Guard, Key> {
}
#[mutants::skip] // hashing involves RNG and is hard to test
+#[cfg(not(tarpaulin_include))]
impl<Guard: Hash, Key: Keyable> Hash for LockGuard<'_, Guard, Key> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.guard.hash(state)
@@ -38,6 +43,7 @@ impl<Guard: Hash, Key: Keyable> Hash for LockGuard<'_, Guard, Key> {
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<Guard: Debug, Key: Keyable> Debug for LockGuard<'_, Guard, Key> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Debug::fmt(&**self, f)
@@ -79,7 +85,7 @@ impl<Guard, Key: Keyable> AsMut<Guard> for LockGuard<'_, Guard, Key> {
#[cfg(test)]
mod tests {
use crate::collection::OwnedLockCollection;
- use crate::{RwLock, ThreadKey};
+ use crate::{LockCollection, Mutex, RwLock, ThreadKey};
#[test]
fn guard_display_works() {
@@ -88,4 +94,59 @@ mod tests {
let guard = lock.read(key);
assert_eq!(guard.to_string(), "Hello, world!".to_string());
}
+
+ #[test]
+ fn deref_mut_works() {
+ let mut key = ThreadKey::get().unwrap();
+ let locks = (Mutex::new(1), Mutex::new(2));
+ let lock = LockCollection::new_ref(&locks);
+ let mut guard = lock.lock(&mut key);
+ *guard.0 = 3;
+ drop(guard);
+
+ let guard = locks.0.lock(&mut key);
+ assert_eq!(*guard, 3);
+ drop(guard);
+
+ let guard = locks.1.lock(&mut key);
+ assert_eq!(*guard, 2);
+ drop(guard);
+ }
+
+ #[test]
+ fn as_ref_works() {
+ let mut key = ThreadKey::get().unwrap();
+ let locks = (Mutex::new(1), Mutex::new(2));
+ let lock = LockCollection::new_ref(&locks);
+ let mut guard = lock.lock(&mut key);
+ *guard.0 = 3;
+ drop(guard);
+
+ let guard = locks.0.lock(&mut key);
+ assert_eq!(guard.as_ref(), &3);
+ drop(guard);
+
+ let guard = locks.1.lock(&mut key);
+ assert_eq!(guard.as_ref(), &2);
+ drop(guard);
+ }
+
+ #[test]
+ fn as_mut_works() {
+ let mut key = ThreadKey::get().unwrap();
+ let locks = (Mutex::new(1), Mutex::new(2));
+ let lock = LockCollection::new_ref(&locks);
+ let mut guard = lock.lock(&mut key);
+ let guard_mut = guard.as_mut();
+ *guard_mut.0 = 3;
+ drop(guard);
+
+ let guard = locks.0.lock(&mut key);
+ assert_eq!(guard.as_ref(), &3);
+ drop(guard);
+
+ let guard = locks.1.lock(&mut key);
+ assert_eq!(guard.as_ref(), &2);
+ drop(guard);
+ }
}
diff --git a/src/collection/owned.rs b/src/collection/owned.rs
index a96300d..4a0d1ef 100644
--- a/src/collection/owned.rs
+++ b/src/collection/owned.rs
@@ -8,6 +8,7 @@ use crate::Keyable;
use super::{utils, LockGuard, OwnedLockCollection};
#[mutants::skip] // it's hard to test individual locks in an OwnedLockCollection
+#[cfg(not(tarpaulin_include))]
fn get_locks<L: Lockable>(data: &L) -> Vec<&dyn RawLock> {
let mut locks = Vec::new();
data.get_ptrs(&mut locks);
@@ -16,6 +17,7 @@ fn get_locks<L: Lockable>(data: &L) -> Vec<&dyn RawLock> {
unsafe impl<L: Lockable> RawLock for OwnedLockCollection<L> {
#[mutants::skip] // this should never run
+ #[cfg(not(tarpaulin_include))]
fn poison(&self) {
let locks = get_locks(&self.data);
for lock in locks {
@@ -63,6 +65,7 @@ unsafe impl<L: Lockable> Lockable for OwnedLockCollection<L> {
Self: 'g;
#[mutants::skip] // It's hard to test locks in an OwnedLockCollection, because they're owned
+ #[cfg(not(tarpaulin_include))]
fn get_ptrs<'a>(&'a self, ptrs: &mut Vec<&'a dyn RawLock>) {
self.data.get_ptrs(ptrs)
}
diff --git a/src/collection/ref.rs b/src/collection/ref.rs
index 512bdec..37973f6 100644
--- a/src/collection/ref.rs
+++ b/src/collection/ref.rs
@@ -41,6 +41,7 @@ where
unsafe impl<L: Lockable> RawLock for RefLockCollection<'_, L> {
#[mutants::skip] // this should never run
+ #[cfg(not(tarpaulin_include))]
fn poison(&self) {
for lock in &self.locks {
lock.poison();
@@ -109,6 +110,7 @@ impl<T, L: AsRef<T>> AsRef<T> for RefLockCollection<'_, L> {
}
#[mutants::skip]
+#[cfg(not(tarpaulin_include))]
impl<L: Debug> Debug for RefLockCollection<'_, L> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct(stringify!(RefLockCollection))
diff --git a/src/collection/retry.rs b/src/collection/retry.rs
index fe0a5b8..331b669 100644
--- a/src/collection/retry.rs
+++ b/src/collection/retry.rs
@@ -1,4 +1,4 @@
-use std::cell::RefCell;
+use std::cell::Cell;
use std::collections::HashSet;
use std::marker::PhantomData;
@@ -9,6 +9,7 @@ use crate::lockable::{
};
use crate::Keyable;
+use super::utils::{attempt_to_recover_locks_from_panic, attempt_to_recover_reads_from_panic};
use super::{LockGuard, RetryingLockCollection};
/// Get all raw locks in the collection
@@ -37,6 +38,7 @@ fn contains_duplicates<L: Lockable>(data: L) -> bool {
unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
#[mutants::skip] // this should never run
+ #[cfg(not(tarpaulin_include))]
fn poison(&self) {
let locks = get_locks(&self.data);
for lock in locks {
@@ -45,7 +47,6 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
}
unsafe fn raw_lock(&self) {
- let mut first_index = 0;
let locks = get_locks(&self.data);
if locks.is_empty() {
@@ -54,16 +55,17 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
}
// these will be unlocked in case of a panic
- let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ let first_index = Cell::new(0);
+ let locked = Cell::new(0);
handle_unwind(
|| unsafe {
'outer: loop {
// This prevents us from entering a spin loop waiting for
// the same lock to be unlocked
// safety: we have the thread key
- locks[first_index].raw_lock();
+ locks[first_index.get()].raw_lock();
for (i, lock) in locks.iter().enumerate() {
- if i == first_index {
+ if i == first_index.get() {
// we've already locked this one
continue;
}
@@ -74,23 +76,21 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
// immediately after, causing a panic
// safety: we have the thread key
if lock.raw_try_lock() {
- locked.borrow_mut().push(*lock)
+ locked.set(locked.get() + 1);
} else {
- for lock in locked.borrow().iter() {
- // safety: we already locked all of these
- lock.raw_unlock();
+ // safety: we already locked all of these
+ attempt_to_recover_locks_from_panic(&locks[0..i]);
+ if first_index.get() >= i {
+ // safety: this is already locked and can't be
+ // unlocked by the previous loop
+ locks[first_index.get()].raw_unlock();
}
- // these are no longer locked
- locked.borrow_mut().clear();
- if first_index >= i {
- // safety: this is already locked and can't be unlocked
- // by the previous loop
- locks[first_index].raw_unlock();
- }
+ // nothing is locked anymore
+ locked.set(0);
// call lock on this to prevent a spin loop
- first_index = i;
+ first_index.set(i);
continue 'outer;
}
}
@@ -99,7 +99,12 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
break;
}
},
- || utils::attempt_to_recover_locks_from_panic(&locked),
+ || {
+ if first_index.get() >= locked.get() {
+ utils::attempt_to_recover_locks_from_panic(&locks[0..locked.get()]);
+ // first_index is outside the recovered slice, so unlock it separately
+ locks[first_index.get()].raw_unlock();
+ } else {
+ // first_index lies inside 0..=locked, so the acquired set is one
+ // lock longer than `locked` counts; recover the inclusive slice
+ utils::attempt_to_recover_locks_from_panic(&locks[0..=locked.get()]);
+ }
+ },
)
}
@@ -113,25 +118,23 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
}
// these will be unlocked in case of a panic
- let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ let locked = Cell::new(0);
handle_unwind(
|| unsafe {
for (i, lock) in locks.iter().enumerate() {
// safety: we have the thread key
if lock.raw_try_lock() {
- locked.borrow_mut().push(*lock);
+ locked.set(locked.get() + 1);
} else {
- for lock in locks.iter().take(i) {
- // safety: we already locked all of these
- lock.raw_unlock();
- }
+ // safety: we already locked all of these
+ attempt_to_recover_locks_from_panic(&locks[0..i]);
return false;
}
}
true
},
- || utils::attempt_to_recover_locks_from_panic(&locked),
+ || utils::attempt_to_recover_locks_from_panic(&locks[0..locked.get()]),
)
}
@@ -144,7 +147,6 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
}
unsafe fn raw_read(&self) {
- let mut first_index = 0;
let locks = get_locks(&self.data);
if locks.is_empty() {
@@ -152,35 +154,35 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
return;
}
- let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ let locked = Cell::new(0);
+ let first_index = Cell::new(0);
handle_unwind(
|| 'outer: loop {
// safety: we have the thread key
- locks[first_index].raw_read();
+ locks[first_index.get()].raw_read();
for (i, lock) in locks.iter().enumerate() {
- if i == first_index {
+ if i == first_index.get() {
continue;
}
// safety: we have the thread key
if lock.raw_try_read() {
- locked.borrow_mut().push(*lock);
+ locked.set(locked.get() + 1);
} else {
- for lock in locked.borrow().iter() {
- // safety: we already locked all of these
- lock.raw_unlock_read();
- }
- // these are no longer locked
- locked.borrow_mut().clear();
+ // safety: we already locked all of these
+ attempt_to_recover_reads_from_panic(&locks[0..i]);
- if first_index >= i {
+ if first_index.get() >= i {
// safety: this is already locked and can't be unlocked
// by the previous loop
- locks[first_index].raw_unlock_read();
+ locks[first_index.get()].raw_unlock_read();
}
+ // these are no longer locked
+ locked.set(0);
+
// don't go into a spin loop, wait for this one to lock
- first_index = i;
+ first_index.set(i);
continue 'outer;
}
}
@@ -188,7 +190,12 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
// safety: we locked all the data
break;
},
- || utils::attempt_to_recover_reads_from_panic(&locked),
+ || {
+ if first_index.get() >= locked.get() {
+ utils::attempt_to_recover_reads_from_panic(&locks[0..locked.get()]);
+ // first_index is outside the recovered slice, so unlock it separately
+ locks[first_index.get()].raw_unlock_read();
+ } else {
+ // first_index lies inside 0..=locked, so the acquired set is one
+ // lock longer than `locked` counts; recover the inclusive slice
+ utils::attempt_to_recover_reads_from_panic(&locks[0..=locked.get()]);
+ }
+ },
)
}
@@ -201,25 +208,23 @@ unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
return true;
}
- let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ let locked = Cell::new(0);
handle_unwind(
|| unsafe {
for (i, lock) in locks.iter().enumerate() {
// safety: we have the thread key
if lock.raw_try_read() {
- locked.borrow_mut().push(*lock);
+ locked.set(locked.get() + 1);
} else {
- for lock in locks.iter().take(i) {
- // safety: we already locked all of these
- lock.raw_unlock_read();
- }
+ // safety: we already locked all of these
+ attempt_to_recover_reads_from_panic(&locks[0..i]);
return false;
}
}
true
},
- || utils::attempt_to_recover_reads_from_panic(&locked),
+ || utils::attempt_to_recover_reads_from_panic(&locks[0..locked.get()]),
)
}
@@ -901,4 +906,16 @@ mod tests {
assert_eq!(collection.into_inner().len(), 2);
}
+
+ #[test]
+ fn lock_empty_lock_collection() {
+ let mut key = ThreadKey::get().unwrap();
+ let collection: RetryingLockCollection<[RwLock<i32>; 0]> = RetryingLockCollection::new([]);
+
+ let guard = collection.lock(&mut key);
+ assert!(guard.len() == 0);
+
+ let guard = collection.read(&mut key);
+ assert!(guard.len() == 0);
+ }
}
diff --git a/src/collection/utils.rs b/src/collection/utils.rs
index d368773..7f29037 100644
--- a/src/collection/utils.rs
+++ b/src/collection/utils.rs
@@ -1,4 +1,4 @@
-use std::cell::RefCell;
+use std::cell::Cell;
use crate::handle_unwind::handle_unwind;
use crate::lockable::RawLock;
@@ -6,31 +6,31 @@ use crate::lockable::RawLock;
/// Lock a set of locks in the given order. It's UB to call this without a `ThreadKey`
pub unsafe fn ordered_lock(locks: &[&dyn RawLock]) {
// these will be unlocked in case of a panic
- let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ let locked = Cell::new(0);
handle_unwind(
|| {
for lock in locks {
lock.raw_lock();
- locked.borrow_mut().push(*lock);
+ locked.set(locked.get() + 1);
}
},
- || attempt_to_recover_locks_from_panic(&locked),
+ || attempt_to_recover_locks_from_panic(&locks[0..locked.get()]),
)
}
/// Lock a set of locks in the given order. It's UB to call this without a `ThreadKey`
pub unsafe fn ordered_read(locks: &[&dyn RawLock]) {
- let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ let locked = Cell::new(0);
handle_unwind(
|| {
for lock in locks {
lock.raw_read();
- locked.borrow_mut().push(*lock);
+ locked.set(locked.get() + 1);
}
},
- || attempt_to_recover_reads_from_panic(&locked),
+ || attempt_to_recover_reads_from_panic(&locks[0..locked.get()]),
)
}
@@ -38,14 +38,14 @@ pub unsafe fn ordered_read(locks: &[&dyn RawLock]) {
/// locks contain duplicates, or if this is called by multiple threads with the
/// locks in different orders.
pub unsafe fn ordered_try_lock(locks: &[&dyn RawLock]) -> bool {
- let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ let locked = Cell::new(0);
handle_unwind(
|| unsafe {
for (i, lock) in locks.iter().enumerate() {
// safety: we have the thread key
if lock.raw_try_lock() {
- locked.borrow_mut().push(*lock);
+ locked.set(locked.get() + 1);
} else {
for lock in &locks[0..i] {
// safety: this lock was already acquired
@@ -59,7 +59,7 @@ pub unsafe fn ordered_try_lock(locks: &[&dyn RawLock]) -> bool {
},
||
// safety: everything in locked is locked
- attempt_to_recover_locks_from_panic(&locked),
+ attempt_to_recover_locks_from_panic(&locks[0..locked.get()]),
)
}
@@ -67,14 +67,14 @@ pub unsafe fn ordered_try_lock(locks: &[&dyn RawLock]) -> bool {
/// is called by multiple threads with the locks in different orders.
pub unsafe fn ordered_try_read(locks: &[&dyn RawLock]) -> bool {
// these will be unlocked in case of a panic
- let locked = RefCell::new(Vec::with_capacity(locks.len()));
+ let locked = Cell::new(0);
handle_unwind(
|| unsafe {
for (i, lock) in locks.iter().enumerate() {
// safety: we have the thread key
if lock.raw_try_read() {
- locked.borrow_mut().push(*lock);
+ locked.set(locked.get() + 1);
} else {
for lock in &locks[0..i] {
// safety: this lock was already acquired
@@ -88,34 +88,30 @@ pub unsafe fn ordered_try_read(locks: &[&dyn RawLock]) -> bool {
},
||
// safety: everything in locked is locked
- attempt_to_recover_reads_from_panic(&locked),
+ attempt_to_recover_reads_from_panic(&locks[0..locked.get()]),
)
}
/// Unlocks the already locked locks in order to recover from a panic
-pub unsafe fn attempt_to_recover_locks_from_panic(locked: &RefCell<Vec<&dyn RawLock>>) {
+pub unsafe fn attempt_to_recover_locks_from_panic(locks: &[&dyn RawLock]) {
handle_unwind(
|| {
- let mut locked = locked.borrow_mut();
- while let Some(locked_lock) = locked.pop() {
- locked_lock.raw_unlock();
- }
+ // safety: the caller guarantees that these are already locked
+ locks.iter().for_each(|lock| lock.raw_unlock());
},
// if we get another panic in here, we'll just have to poison what remains
- || locked.borrow().iter().for_each(|l| l.poison()),
+ || locks.iter().for_each(|l| l.poison()),
)
}
/// Unlocks the already locked locks in order to recover from a panic
-pub unsafe fn attempt_to_recover_reads_from_panic(locked: &RefCell<Vec<&dyn RawLock>>) {
+pub unsafe fn attempt_to_recover_reads_from_panic(locked: &[&dyn RawLock]) {
handle_unwind(
|| {
- let mut locked = locked.borrow_mut();
- while let Some(locked_lock) = locked.pop() {
- locked_lock.raw_unlock_read();
- }
+ // safety: the caller guarantees these are already read-locked
+ locked.iter().for_each(|lock| lock.raw_unlock_read());
},
// if we get another panic in here, we'll just have to poison what remains
- || locked.borrow().iter().for_each(|l| l.poison()),
+ || locked.iter().for_each(|l| l.poison()),
)
}