summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorMica White <botahamec@gmail.com>2024-12-26 11:06:23 -0500
committerMica White <botahamec@gmail.com>2024-12-26 11:26:29 -0500
commit096afea6f13692fddbfad0b07e5377cb2e81dd58 (patch)
tree53c252e3277683e7e8686539fde83e6cc5e1762d /src
parenta060123077b94f61e3d0802a6977ad547276fd1b (diff)
Rename kill to poison
Diffstat (limited to 'src')
-rw-r--r--src/collection/boxed.rs6
-rw-r--r--src/collection/owned.rs4
-rw-r--r--src/collection/ref.rs4
-rw-r--r--src/collection/retry.rs4
-rw-r--r--src/collection/utils.rs4
-rw-r--r--src/lockable.rs2
-rw-r--r--src/mutex/mutex.rs8
-rw-r--r--src/poisonable/poisonable.rs4
-rw-r--r--src/rwlock/rwlock.rs14
9 files changed, 26 insertions, 24 deletions
diff --git a/src/collection/boxed.rs b/src/collection/boxed.rs
index 2397bd3..a048d2b 100644
--- a/src/collection/boxed.rs
+++ b/src/collection/boxed.rs
@@ -21,9 +21,9 @@ fn contains_duplicates(l: &[&dyn RawLock]) -> bool {
}
unsafe impl<L: Lockable> RawLock for BoxedLockCollection<L> {
- fn kill(&self) {
+ fn poison(&self) {
for lock in &self.locks {
- lock.kill();
+ lock.poison();
}
}
@@ -196,6 +196,8 @@ impl<L> BoxedLockCollection<L> {
self.locks.clear();
// safety: this was allocated using a box, and is now unique
let boxed: Box<UnsafeCell<L>> = Box::from_raw(self.data.cast_mut());
+ // to prevent a double free
+ std::mem::forget(self);
boxed.into_inner()
}
diff --git a/src/collection/owned.rs b/src/collection/owned.rs
index e4cfe46..59e1ff8 100644
--- a/src/collection/owned.rs
+++ b/src/collection/owned.rs
@@ -14,10 +14,10 @@ fn get_locks<L: Lockable>(data: &L) -> Vec<&dyn RawLock> {
}
unsafe impl<L: Lockable> RawLock for OwnedLockCollection<L> {
- fn kill(&self) {
+ fn poison(&self) {
let locks = get_locks(&self.data);
for lock in locks {
- lock.kill();
+ lock.poison();
}
}
diff --git a/src/collection/ref.rs b/src/collection/ref.rs
index 4fa5485..a9c3579 100644
--- a/src/collection/ref.rs
+++ b/src/collection/ref.rs
@@ -40,9 +40,9 @@ where
}
unsafe impl<L: Lockable> RawLock for RefLockCollection<'_, L> {
- fn kill(&self) {
+ fn poison(&self) {
for lock in &self.locks {
- lock.kill();
+ lock.poison();
}
}
diff --git a/src/collection/retry.rs b/src/collection/retry.rs
index 687c5ec..cb6a1fb 100644
--- a/src/collection/retry.rs
+++ b/src/collection/retry.rs
@@ -36,10 +36,10 @@ fn contains_duplicates<L: Lockable>(data: L) -> bool {
}
unsafe impl<L: Lockable> RawLock for RetryingLockCollection<L> {
- fn kill(&self) {
+ fn poison(&self) {
let locks = get_locks(&self.data);
for lock in locks {
- lock.kill();
+ lock.poison();
}
}
diff --git a/src/collection/utils.rs b/src/collection/utils.rs
index f418386..36b19be 100644
--- a/src/collection/utils.rs
+++ b/src/collection/utils.rs
@@ -96,7 +96,7 @@ pub unsafe fn attempt_to_recover_locks_from_panic(locked: &RefCell<Vec<&dyn RawL
locked_lock.raw_unlock();
}
},
- || locked.borrow().iter().for_each(|l| l.kill()),
+ || locked.borrow().iter().for_each(|l| l.poison()),
)
}
@@ -108,6 +108,6 @@ pub unsafe fn attempt_to_recover_reads_from_panic(locked: &RefCell<Vec<&dyn RawL
locked_lock.raw_unlock_read();
}
},
- || locked.borrow().iter().for_each(|l| l.kill()),
+ || locked.borrow().iter().for_each(|l| l.poison()),
)
}
diff --git a/src/lockable.rs b/src/lockable.rs
index 1154d16..d599820 100644
--- a/src/lockable.rs
+++ b/src/lockable.rs
@@ -17,7 +17,7 @@ use std::mem::MaybeUninit;
pub unsafe trait RawLock {
/// Causes all subsequent calls to the `lock` function on this lock to
/// panic. This does not affect anything currently holding the lock.
- fn kill(&self);
+ fn poison(&self);
/// Blocks until the lock is acquired
///
diff --git a/src/mutex/mutex.rs b/src/mutex/mutex.rs
index 4671b4f..2cf6bbf 100644
--- a/src/mutex/mutex.rs
+++ b/src/mutex/mutex.rs
@@ -13,7 +13,7 @@ use crate::poisonable::PoisonFlag;
use super::{Mutex, MutexGuard, MutexRef};
unsafe impl<T: ?Sized, R: RawMutex> RawLock for Mutex<T, R> {
- fn kill(&self) {
+ fn poison(&self) {
self.poison.poison();
}
@@ -22,7 +22,7 @@ unsafe impl<T: ?Sized, R: RawMutex> RawLock for Mutex<T, R> {
// if the closure unwinds, then the mutex will be poisoned
let this = AssertUnwindSafe(self);
- handle_unwind(|| this.raw.lock(), || self.kill())
+ handle_unwind(|| this.raw.lock(), || self.poison())
}
unsafe fn raw_try_lock(&self) -> bool {
@@ -32,13 +32,13 @@ unsafe impl<T: ?Sized, R: RawMutex> RawLock for Mutex<T, R> {
// if the closure unwinds, then the mutex will be poisoned
let this = AssertUnwindSafe(self);
- handle_unwind(|| this.raw.try_lock(), || self.kill())
+ handle_unwind(|| this.raw.try_lock(), || self.poison())
}
unsafe fn raw_unlock(&self) {
// if the closure unwinds, then the mutex will be poisoned
let this = AssertUnwindSafe(self);
- handle_unwind(|| this.raw.unlock(), || self.kill())
+ handle_unwind(|| this.raw.unlock(), || self.poison())
}
// this is the closest thing to a read we can get, but Sharable isn't
diff --git a/src/poisonable/poisonable.rs b/src/poisonable/poisonable.rs
index 79f90d9..3ef1cdd 100644
--- a/src/poisonable/poisonable.rs
+++ b/src/poisonable/poisonable.rs
@@ -12,8 +12,8 @@ use super::{
};
unsafe impl<L: Lockable + RawLock> RawLock for Poisonable<L> {
- fn kill(&self) {
- self.inner.kill()
+ fn poison(&self) {
+ self.inner.poison()
}
unsafe fn raw_lock(&self) {
diff --git a/src/rwlock/rwlock.rs b/src/rwlock/rwlock.rs
index 66c7362..8bb170c 100644
--- a/src/rwlock/rwlock.rs
+++ b/src/rwlock/rwlock.rs
@@ -14,7 +14,7 @@ use crate::lockable::{
use super::{PoisonFlag, RwLock, RwLockReadGuard, RwLockReadRef, RwLockWriteGuard, RwLockWriteRef};
unsafe impl<T: ?Sized, R: RawRwLock> RawLock for RwLock<T, R> {
- fn kill(&self) {
+ fn poison(&self) {
self.poison.poison();
}
@@ -26,7 +26,7 @@ unsafe impl<T: ?Sized, R: RawRwLock> RawLock for RwLock<T, R> {
// if the closure unwinds, then the lock will be poisoned
let this = AssertUnwindSafe(self);
- handle_unwind(|| this.raw.lock_exclusive(), || self.kill())
+ handle_unwind(|| this.raw.lock_exclusive(), || self.poison())
}
unsafe fn raw_try_lock(&self) -> bool {
@@ -36,13 +36,13 @@ unsafe impl<T: ?Sized, R: RawRwLock> RawLock for RwLock<T, R> {
// if the closure unwinds, then the lock will be poisoned
let this = AssertUnwindSafe(self);
- handle_unwind(|| this.raw.try_lock_exclusive(), || self.kill())
+ handle_unwind(|| this.raw.try_lock_exclusive(), || self.poison())
}
unsafe fn raw_unlock(&self) {
// if the closure unwinds, then the lock will be poisoned
let this = AssertUnwindSafe(self);
- handle_unwind(|| this.raw.unlock_exclusive(), || self.kill())
+ handle_unwind(|| this.raw.unlock_exclusive(), || self.poison())
}
unsafe fn raw_read(&self) {
@@ -53,7 +53,7 @@ unsafe impl<T: ?Sized, R: RawRwLock> RawLock for RwLock<T, R> {
// if the closure unwinds, then the lock will be poisoned
let this = AssertUnwindSafe(self);
- handle_unwind(|| this.raw.lock_shared(), || self.kill())
+ handle_unwind(|| this.raw.lock_shared(), || self.poison())
}
unsafe fn raw_try_read(&self) -> bool {
@@ -63,13 +63,13 @@ unsafe impl<T: ?Sized, R: RawRwLock> RawLock for RwLock<T, R> {
// if the closure unwinds, then the lock will be poisoned
let this = AssertUnwindSafe(self);
- handle_unwind(|| this.raw.try_lock_shared(), || self.kill())
+ handle_unwind(|| this.raw.try_lock_shared(), || self.poison())
}
unsafe fn raw_unlock_read(&self) {
// if the closure unwinds, then the lock will be poisoned
let this = AssertUnwindSafe(self);
- handle_unwind(|| this.raw.unlock_shared(), || self.kill())
+ handle_unwind(|| this.raw.unlock_shared(), || self.poison())
}
}