author    Android Build Coastguard Worker <android-build-coastguard-worker@google.com>  2022-03-04 02:05:00 +0000
committer Android Build Coastguard Worker <android-build-coastguard-worker@google.com>  2022-03-04 02:05:00 +0000
commit    a455787fba5c715f9dd713435aab35325b8e03d1 (patch)
tree      feb19fab9d453132173eb11e005693b621964b78
parent    2a99f22e4e19a2989ac2eb2aa0aa7465b70f43c8 (diff)
parent    e6ce8ba539a0986e2ee611e4a715b4aaa7c0ae5b (diff)
download  intrusive-collections-android13-platform-release.tar.gz
Snap for 8249732 from e6ce8ba539a0986e2ee611e4a715b4aaa7c0ae5b to tm-release
Change-Id: Ie507af1dd7e3e8da0f0647a9f4dc0f7be758a6ee
-rw-r--r--  .cargo_vcs_info.json         7
-rw-r--r--  Android.bp                   2
-rw-r--r--  Cargo.toml                  11
-rw-r--r--  Cargo.toml.orig              2
-rw-r--r--  METADATA                    10
-rw-r--r--  src/lib.rs                   4
-rw-r--r--  src/linked_list.rs         240
-rw-r--r--  src/rbtree.rs              288
-rw-r--r--  src/singly_linked_list.rs  210
-rw-r--r--  src/xor_linked_list.rs     192
10 files changed, 948 insertions(+), 18 deletions(-)
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
index b9b4b0c..f08b5a6 100644
--- a/.cargo_vcs_info.json
+++ b/.cargo_vcs_info.json
@@ -1,5 +1,6 @@
{
"git": {
- "sha1": "2d89ebc111d7c3955dba15faec57ad4dbcd49d26"
- }
-}
+ "sha1": "9a11a214a4efe29f61202ffa92e110b466e516c3"
+ },
+ "path_in_vcs": ""
+}
\ No newline at end of file
diff --git a/Android.bp b/Android.bp
index 6cf914d..e056d9c 100644
--- a/Android.bp
+++ b/Android.bp
@@ -45,7 +45,7 @@ rust_library {
host_supported: true,
crate_name: "intrusive_collections",
cargo_env_compat: true,
- cargo_pkg_version: "0.9.2",
+ cargo_pkg_version: "0.9.3",
srcs: ["src/lib.rs"],
edition: "2018",
features: [
diff --git a/Cargo.toml b/Cargo.toml
index 1b8f9ee..49d1efd 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -3,17 +3,16 @@
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g., crates.io) dependencies
+# to registry (e.g., crates.io) dependencies.
#
-# If you believe there's an error in this file please file an
-# issue against the rust-lang/cargo repository. If you're
-# editing this file be aware that the upstream Cargo.toml
-# will likely look very different (and much more reasonable)
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
name = "intrusive-collections"
-version = "0.9.2"
+version = "0.9.3"
authors = ["Amanieu d'Antras <amanieu@gmail.com>"]
description = "Intrusive collections for Rust (linked list and red-black tree)"
documentation = "https://docs.rs/intrusive-collections"
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
index ea5413d..909123e 100644
--- a/Cargo.toml.orig
+++ b/Cargo.toml.orig
@@ -1,6 +1,6 @@
[package]
name = "intrusive-collections"
-version = "0.9.2"
+version = "0.9.3"
authors = ["Amanieu d'Antras <amanieu@gmail.com>"]
description = "Intrusive collections for Rust (linked list and red-black tree)"
documentation = "https://docs.rs/intrusive-collections"
diff --git a/METADATA b/METADATA
index 3ddaf5c..0439bd7 100644
--- a/METADATA
+++ b/METADATA
@@ -7,13 +7,13 @@ third_party {
}
url {
type: ARCHIVE
- value: "https://static.crates.io/crates/intrusive-collections/intrusive-collections-0.9.2.crate"
+ value: "https://static.crates.io/crates/intrusive-collections/intrusive-collections-0.9.3.crate"
}
- version: "0.9.2"
+ version: "0.9.3"
license_type: NOTICE
last_upgrade_date {
- year: 2021
- month: 7
- day: 15
+ year: 2022
+ month: 3
+ day: 1
}
}
diff --git a/src/lib.rs b/src/lib.rs
index b873910..3352ba9 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -292,14 +292,18 @@ pub mod xor_linked_list;
pub use crate::adapter::Adapter;
pub use crate::key_adapter::KeyAdapter;
pub use crate::link_ops::{DefaultLinkOps, LinkOps};
+pub use crate::linked_list::AtomicLink as LinkedListAtomicLink;
pub use crate::linked_list::Link as LinkedListLink;
pub use crate::linked_list::LinkedList;
pub use crate::pointer_ops::{DefaultPointerOps, PointerOps};
+pub use crate::rbtree::AtomicLink as RBTreeAtomicLink;
pub use crate::rbtree::Link as RBTreeLink;
pub use crate::rbtree::RBTree;
+pub use crate::singly_linked_list::AtomicLink as SinglyLinkedListAtomicLink;
pub use crate::singly_linked_list::Link as SinglyLinkedListLink;
pub use crate::singly_linked_list::SinglyLinkedList;
pub use crate::unsafe_ref::UnsafeRef;
+pub use crate::xor_linked_list::AtomicLink as XorLinkedListAtomicLink;
pub use crate::xor_linked_list::Link as XorLinkedListLink;
pub use crate::xor_linked_list::XorLinkedList;
pub use memoffset::offset_of;
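
A minimal sketch (not part of this patch) of how the newly re-exported atomic links might be used. `MyObject` and `MyAdapter` are illustrative names; `intrusive_adapter!`, `LinkedList`, and `LinkedListAtomicLink` are existing crate API. The atomic link drops into an adapter exactly like `LinkedListLink`, while keeping the containing object shareable across threads:

```rust
use intrusive_collections::{intrusive_adapter, LinkedList, LinkedListAtomicLink};

struct MyObject {
    // Atomic link: the containing object may be shared between threads.
    link: LinkedListAtomicLink,
    value: u64,
}

// Map between `MyObject` and its embedded link.
intrusive_adapter!(MyAdapter = Box<MyObject>: MyObject { link: LinkedListAtomicLink });

fn main() {
    let mut list = LinkedList::new(MyAdapter::new());
    list.push_back(Box::new(MyObject {
        link: LinkedListAtomicLink::new(),
        value: 42,
    }));
    assert_eq!(list.front().get().map(|o| o.value), Some(42));
}
```
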
diff --git a/src/linked_list.rs b/src/linked_list.rs
index efda06a..7b8e5fb 100644
--- a/src/linked_list.rs
+++ b/src/linked_list.rs
@@ -10,7 +10,8 @@
use core::cell::Cell;
use core::fmt;
-use core::ptr::NonNull;
+use core::ptr::{null_mut, NonNull};
+use core::sync::atomic::{AtomicPtr, Ordering};
use crate::link_ops::{self, DefaultLinkOps};
use crate::pointer_ops::PointerOps;
@@ -267,6 +268,243 @@ unsafe impl XorLinkedListOps for LinkOps {
}
}
+// =============================================================================
+// AtomicLink
+// =============================================================================
+
+/// Intrusive atomic link that allows an object to be inserted into a
+/// `LinkedList`. This link allows the structure to be shared between threads.
+#[repr(align(2))]
+pub struct AtomicLink {
+ next: AtomicPtr<AtomicLink>,
+ prev: Cell<Option<NonNull<AtomicLink>>>,
+}
+
+// Use a special value to indicate an unlinked node
+const ATOMIC_UNLINKED_MARKER_PTR: *mut AtomicLink = 1 as *mut AtomicLink;
+
+// The same marker, wrapped as an `Option<NonNull<AtomicLink>>` for use in the `prev` cell
+const ATOMIC_UNLINKED_MARKER: Option<NonNull<AtomicLink>> =
+ unsafe { Some(NonNull::new_unchecked(ATOMIC_UNLINKED_MARKER_PTR)) };
+
+impl AtomicLink {
+ /// Creates a new `AtomicLink`.
+ #[inline]
+ pub const fn new() -> AtomicLink {
+ Self {
+ next: AtomicPtr::new(ATOMIC_UNLINKED_MARKER_PTR),
+ prev: Cell::new(ATOMIC_UNLINKED_MARKER),
+ }
+ }
+
+ /// Checks whether the `AtomicLink` is linked into a `LinkedList`.
+ #[inline]
+ pub fn is_linked(&self) -> bool {
+ self.next.load(Ordering::Relaxed) != ATOMIC_UNLINKED_MARKER_PTR
+ }
+
+ /// Forcibly unlinks an object from a `LinkedList`.
+ ///
+ /// # Safety
+ ///
+ /// It is undefined behavior to call this function while still linked into a
+ /// `LinkedList`. The only situation where this function is useful is
+ /// after calling `fast_clear` on a `LinkedList`, since this clears
+ /// the collection without marking the nodes as unlinked.
+ #[inline]
+ pub unsafe fn force_unlink(&self) {
+ self.next
+ .store(ATOMIC_UNLINKED_MARKER_PTR, Ordering::Release)
+ }
+
+ /// Access the `next` pointer in an exclusive context.
+ ///
+ /// # Safety
+ ///
+    /// This can only be called after `acquire_link` has been successfully called.
+ #[inline]
+ unsafe fn next_exclusive(&self) -> &Cell<Option<NonNull<AtomicLink>>> {
+        // This is safe because currently AtomicPtr<AtomicLink> has the same representation as Cell<Option<NonNull<AtomicLink>>>.
+ core::mem::transmute(&self.next)
+ }
+}
+
+impl DefaultLinkOps for AtomicLink {
+ type Ops = AtomicLinkOps;
+
+ const NEW: Self::Ops = AtomicLinkOps;
+}
+
+// An object containing a link can be sent to another thread since `acquire_link` is atomic.
+unsafe impl Send for AtomicLink {}
+
+// An object containing a link can be shared between threads since `acquire_link` is atomic.
+unsafe impl Sync for AtomicLink {}
+
+impl Clone for AtomicLink {
+ #[inline]
+ fn clone(&self) -> AtomicLink {
+ AtomicLink::new()
+ }
+}
+
+impl Default for AtomicLink {
+ #[inline]
+ fn default() -> AtomicLink {
+ AtomicLink::new()
+ }
+}
+
+// Provide an implementation of Debug so that structs containing a link can
+// still derive Debug.
+impl fmt::Debug for AtomicLink {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // There isn't anything sensible to print here except whether the link
+ // is currently in a list.
+ if self.is_linked() {
+ write!(f, "linked")
+ } else {
+ write!(f, "unlinked")
+ }
+ }
+}
+
+// =============================================================================
+// AtomicLinkOps
+// =============================================================================
+
+/// Default `AtomicLinkOps` implementation for `LinkedList`.
+#[derive(Clone, Copy, Default)]
+pub struct AtomicLinkOps;
+
+unsafe impl link_ops::LinkOps for AtomicLinkOps {
+ type LinkPtr = NonNull<AtomicLink>;
+
+ #[inline]
+ unsafe fn acquire_link(&mut self, ptr: Self::LinkPtr) -> bool {
+ ptr.as_ref()
+ .next
+ .compare_exchange(
+ ATOMIC_UNLINKED_MARKER_PTR,
+ null_mut(),
+ Ordering::Acquire,
+ Ordering::Relaxed,
+ )
+ .is_ok()
+ }
+
+ #[inline]
+ unsafe fn release_link(&mut self, ptr: Self::LinkPtr) {
+ ptr.as_ref()
+ .next
+ .store(ATOMIC_UNLINKED_MARKER_PTR, Ordering::Release)
+ }
+}
+
+unsafe impl LinkedListOps for AtomicLinkOps {
+ #[inline]
+ unsafe fn next(&self, ptr: Self::LinkPtr) -> Option<Self::LinkPtr> {
+ ptr.as_ref().next_exclusive().get()
+ }
+
+ #[inline]
+ unsafe fn prev(&self, ptr: Self::LinkPtr) -> Option<Self::LinkPtr> {
+ ptr.as_ref().prev.get()
+ }
+
+ #[inline]
+ unsafe fn set_next(&mut self, ptr: Self::LinkPtr, next: Option<Self::LinkPtr>) {
+ ptr.as_ref().next_exclusive().set(next);
+ }
+
+ #[inline]
+ unsafe fn set_prev(&mut self, ptr: Self::LinkPtr, prev: Option<Self::LinkPtr>) {
+ ptr.as_ref().prev.set(prev);
+ }
+}
+
+unsafe impl SinglyLinkedListOps for AtomicLinkOps {
+ #[inline]
+ unsafe fn next(&self, ptr: Self::LinkPtr) -> Option<Self::LinkPtr> {
+ ptr.as_ref().next_exclusive().get()
+ }
+
+ #[inline]
+ unsafe fn set_next(&mut self, ptr: Self::LinkPtr, next: Option<Self::LinkPtr>) {
+ ptr.as_ref().next_exclusive().set(next);
+ }
+}
+
+unsafe impl XorLinkedListOps for AtomicLinkOps {
+ #[inline]
+ unsafe fn next(
+ &self,
+ ptr: Self::LinkPtr,
+ prev: Option<Self::LinkPtr>,
+ ) -> Option<Self::LinkPtr> {
+ let packed = ptr
+ .as_ref()
+ .next_exclusive()
+ .get()
+ .map(|x| x.as_ptr() as usize)
+ .unwrap_or(0);
+ let raw = packed ^ prev.map(|x| x.as_ptr() as usize).unwrap_or(0);
+ NonNull::new(raw as *mut _)
+ }
+
+ #[inline]
+ unsafe fn prev(
+ &self,
+ ptr: Self::LinkPtr,
+ next: Option<Self::LinkPtr>,
+ ) -> Option<Self::LinkPtr> {
+ let packed = ptr
+ .as_ref()
+ .next_exclusive()
+ .get()
+ .map(|x| x.as_ptr() as usize)
+ .unwrap_or(0);
+ let raw = packed ^ next.map(|x| x.as_ptr() as usize).unwrap_or(0);
+ NonNull::new(raw as *mut _)
+ }
+
+ #[inline]
+ unsafe fn set(
+ &mut self,
+ ptr: Self::LinkPtr,
+ prev: Option<Self::LinkPtr>,
+ next: Option<Self::LinkPtr>,
+ ) {
+ let new_packed = prev.map(|x| x.as_ptr() as usize).unwrap_or(0)
+ ^ next.map(|x| x.as_ptr() as usize).unwrap_or(0);
+
+ let new_next = NonNull::new(new_packed as *mut _);
+ ptr.as_ref().next_exclusive().set(new_next);
+ }
+
+ #[inline]
+ unsafe fn replace_next_or_prev(
+ &mut self,
+ ptr: Self::LinkPtr,
+ old: Option<Self::LinkPtr>,
+ new: Option<Self::LinkPtr>,
+ ) {
+ let packed = ptr
+ .as_ref()
+ .next_exclusive()
+ .get()
+ .map(|x| x.as_ptr() as usize)
+ .unwrap_or(0);
+ let new_packed = packed
+ ^ old.map(|x| x.as_ptr() as usize).unwrap_or(0)
+ ^ new.map(|x| x.as_ptr() as usize).unwrap_or(0);
+
+ let new_next = NonNull::new(new_packed as *mut _);
+ ptr.as_ref().next_exclusive().set(new_next);
+ }
+}
+
#[inline]
unsafe fn link_between<T: LinkedListOps>(
link_ops: &mut T,
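
The `acquire_link`/`release_link` pair added above implements a one-shot claim on the link via compare-and-swap, which is what makes `AtomicLink` safely `Send`/`Sync`. A standalone sketch of that protocol, assuming a bare `usize` state word in place of the real pointer (`MiniLink` and `UNLINKED` are illustrative, not crate API):

```rust
use core::sync::atomic::{AtomicUsize, Ordering};

// Marker value for the unlinked state, analogous to ATOMIC_UNLINKED_MARKER_PTR.
const UNLINKED: usize = 1;

struct MiniLink {
    state: AtomicUsize,
}

impl MiniLink {
    const fn new() -> Self {
        MiniLink { state: AtomicUsize::new(UNLINKED) }
    }

    // Claim the link for one collection; at most one thread can win the CAS.
    fn acquire(&self) -> bool {
        self.state
            .compare_exchange(UNLINKED, 0, Ordering::Acquire, Ordering::Relaxed)
            .is_ok()
    }

    // Return the link to the unlinked state, publishing prior writes.
    fn release(&self) {
        self.state.store(UNLINKED, Ordering::Release);
    }
}

fn main() {
    let link = MiniLink::new();
    assert!(link.acquire());  // first insertion wins
    assert!(!link.acquire()); // a concurrent second insertion fails
    link.release();
    assert!(link.acquire());  // after release the link can be reused
}
```

Once the CAS has succeeded, the winning collection has exclusive access to the link fields, which is why the `next_exclusive` transmute to `Cell` above is sound.
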
diff --git a/src/rbtree.rs b/src/rbtree.rs
index ad8f861..f9facfc 100644
--- a/src/rbtree.rs
+++ b/src/rbtree.rs
@@ -14,6 +14,7 @@ use core::cmp::Ordering;
use core::fmt;
use core::mem;
use core::ptr::NonNull;
+use core::sync::atomic::{self, AtomicUsize};
use crate::Bound::{self, Excluded, Included, Unbounded};
@@ -358,6 +359,293 @@ unsafe impl XorLinkedListOps for LinkOps {
}
}
+// =============================================================================
+// AtomicLink
+// =============================================================================
+
+/// Intrusive link that allows an object to be inserted into a
+/// `RBTree`. This link allows the structure to be shared between threads.
+#[repr(align(2))]
+pub struct AtomicLink {
+ left: Cell<Option<NonNull<AtomicLink>>>,
+ right: Cell<Option<NonNull<AtomicLink>>>,
+ parent_color: AtomicUsize,
+}
+
+impl AtomicLink {
+    /// Creates a new `AtomicLink`.
+    #[inline]
+ pub const fn new() -> AtomicLink {
+ AtomicLink {
+ left: Cell::new(None),
+ right: Cell::new(None),
+ parent_color: AtomicUsize::new(UNLINKED_MARKER),
+ }
+ }
+
+ /// Checks whether the `AtomicLink` is linked into a `RBTree`.
+ #[inline]
+ pub fn is_linked(&self) -> bool {
+ self.parent_color.load(atomic::Ordering::Relaxed) != UNLINKED_MARKER
+ }
+
+ /// Forcibly unlinks an object from a `RBTree`.
+ ///
+ /// # Safety
+ ///
+ /// It is undefined behavior to call this function while still linked into a
+ /// `RBTree`. The only situation where this function is useful is
+ /// after calling `fast_clear` on a `RBTree`, since this clears
+ /// the collection without marking the nodes as unlinked.
+ #[inline]
+ pub unsafe fn force_unlink(&self) {
+ self.parent_color
+ .store(UNLINKED_MARKER, atomic::Ordering::Release);
+ }
+
+ /// Access `parent_color` in an exclusive context.
+ ///
+ /// # Safety
+ ///
+    /// This can only be called after `acquire_link` has been successfully called.
+ #[inline]
+ unsafe fn parent_color_exclusive(&self) -> &Cell<usize> {
+        // This is safe because currently AtomicUsize has the same representation as Cell<usize>.
+ core::mem::transmute(&self.parent_color)
+ }
+}
+
+impl DefaultLinkOps for AtomicLink {
+ type Ops = AtomicLinkOps;
+
+ const NEW: Self::Ops = AtomicLinkOps;
+}
+
+// An object containing a link can be sent to another thread since `acquire_link` is atomic.
+unsafe impl Send for AtomicLink {}
+
+// An object containing a link can be shared between threads since `acquire_link` is atomic.
+unsafe impl Sync for AtomicLink {}
+
+impl Clone for AtomicLink {
+ #[inline]
+ fn clone(&self) -> AtomicLink {
+ AtomicLink::new()
+ }
+}
+
+impl Default for AtomicLink {
+ #[inline]
+ fn default() -> AtomicLink {
+ AtomicLink::new()
+ }
+}
+
+// Provide an implementation of Debug so that structs containing a link can
+// still derive Debug.
+impl fmt::Debug for AtomicLink {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // There isn't anything sensible to print here except whether the link
+ // is currently in a list.
+ if self.is_linked() {
+ write!(f, "linked")
+ } else {
+ write!(f, "unlinked")
+ }
+ }
+}
+
+// =============================================================================
+// AtomicLinkOps
+// =============================================================================
+
+/// Default `LinkOps` implementation for `RBTree`.
+#[derive(Clone, Copy, Default)]
+pub struct AtomicLinkOps;
+
+impl AtomicLinkOps {
+ #[inline]
+ unsafe fn set_parent_color(
+ self,
+ ptr: <Self as link_ops::LinkOps>::LinkPtr,
+ parent: Option<<Self as link_ops::LinkOps>::LinkPtr>,
+ color: Color,
+ ) {
+        assert!(mem::align_of::<AtomicLink>() >= 2);
+ let bit = match color {
+ Color::Red => 0,
+ Color::Black => 1,
+ };
+ let parent_usize = parent.map(|x| x.as_ptr() as usize).unwrap_or(0);
+ ptr.as_ref()
+ .parent_color_exclusive()
+ .set((parent_usize & !1) | bit);
+ }
+}
+
+const LINKED_DEFAULT_VALUE: usize = 1;
+
+unsafe impl link_ops::LinkOps for AtomicLinkOps {
+ type LinkPtr = NonNull<AtomicLink>;
+
+ #[inline]
+ unsafe fn acquire_link(&mut self, ptr: Self::LinkPtr) -> bool {
+ ptr.as_ref()
+ .parent_color
+ .compare_exchange(
+ UNLINKED_MARKER,
+ LINKED_DEFAULT_VALUE,
+ atomic::Ordering::Acquire,
+ atomic::Ordering::Relaxed,
+ )
+ .is_ok()
+ }
+
+ #[inline]
+ unsafe fn release_link(&mut self, ptr: Self::LinkPtr) {
+ ptr.as_ref()
+ .parent_color
+ .store(UNLINKED_MARKER, atomic::Ordering::Release)
+ }
+}
+
+unsafe impl RBTreeOps for AtomicLinkOps {
+ #[inline]
+ unsafe fn left(&self, ptr: Self::LinkPtr) -> Option<Self::LinkPtr> {
+ ptr.as_ref().left.get()
+ }
+
+ #[inline]
+ unsafe fn right(&self, ptr: Self::LinkPtr) -> Option<Self::LinkPtr> {
+ ptr.as_ref().right.get()
+ }
+
+ #[inline]
+ unsafe fn parent(&self, ptr: Self::LinkPtr) -> Option<Self::LinkPtr> {
+ let parent_usize = ptr.as_ref().parent_color_exclusive().get() & !1;
+ NonNull::new(parent_usize as *mut AtomicLink)
+ }
+
+ #[inline]
+ unsafe fn color(&self, ptr: Self::LinkPtr) -> Color {
+ if ptr.as_ref().parent_color_exclusive().get() & 1 == 1 {
+ Color::Black
+ } else {
+ Color::Red
+ }
+ }
+
+ #[inline]
+ unsafe fn set_left(&mut self, ptr: Self::LinkPtr, left: Option<Self::LinkPtr>) {
+ ptr.as_ref().left.set(left);
+ }
+
+ #[inline]
+ unsafe fn set_right(&mut self, ptr: Self::LinkPtr, right: Option<Self::LinkPtr>) {
+ ptr.as_ref().right.set(right);
+ }
+
+ #[inline]
+ unsafe fn set_parent(&mut self, ptr: Self::LinkPtr, parent: Option<Self::LinkPtr>) {
+ self.set_parent_color(ptr, parent, self.color(ptr));
+ }
+
+ #[inline]
+ unsafe fn set_color(&mut self, ptr: Self::LinkPtr, color: Color) {
+ self.set_parent_color(ptr, self.parent(ptr), color);
+ }
+}
+
+unsafe impl SinglyLinkedListOps for AtomicLinkOps {
+ #[inline]
+ unsafe fn next(&self, ptr: Self::LinkPtr) -> Option<Self::LinkPtr> {
+ self.right(ptr)
+ }
+
+ #[inline]
+ unsafe fn set_next(&mut self, ptr: Self::LinkPtr, next: Option<Self::LinkPtr>) {
+ self.set_right(ptr, next);
+ }
+}
+
+unsafe impl LinkedListOps for AtomicLinkOps {
+ #[inline]
+ unsafe fn next(&self, ptr: Self::LinkPtr) -> Option<Self::LinkPtr> {
+ self.right(ptr)
+ }
+
+ #[inline]
+ unsafe fn prev(&self, ptr: Self::LinkPtr) -> Option<Self::LinkPtr> {
+ self.left(ptr)
+ }
+
+ #[inline]
+ unsafe fn set_next(&mut self, ptr: Self::LinkPtr, next: Option<Self::LinkPtr>) {
+ self.set_right(ptr, next);
+ }
+
+ #[inline]
+ unsafe fn set_prev(&mut self, ptr: Self::LinkPtr, prev: Option<Self::LinkPtr>) {
+ self.set_left(ptr, prev);
+ }
+}
+
+unsafe impl XorLinkedListOps for AtomicLinkOps {
+ #[inline]
+ unsafe fn next(
+ &self,
+ ptr: Self::LinkPtr,
+ prev: Option<Self::LinkPtr>,
+ ) -> Option<Self::LinkPtr> {
+ let packed = self.right(ptr).map(|x| x.as_ptr() as usize).unwrap_or(0);
+ let raw = packed ^ prev.map(|x| x.as_ptr() as usize).unwrap_or(0);
+ NonNull::new(raw as *mut _)
+ }
+
+ #[inline]
+ unsafe fn prev(
+ &self,
+ ptr: Self::LinkPtr,
+ next: Option<Self::LinkPtr>,
+ ) -> Option<Self::LinkPtr> {
+ let packed = self.right(ptr).map(|x| x.as_ptr() as usize).unwrap_or(0);
+ let raw = packed ^ next.map(|x| x.as_ptr() as usize).unwrap_or(0);
+ NonNull::new(raw as *mut _)
+ }
+
+ #[inline]
+ unsafe fn set(
+ &mut self,
+ ptr: Self::LinkPtr,
+ prev: Option<Self::LinkPtr>,
+ next: Option<Self::LinkPtr>,
+ ) {
+ let new_packed = prev.map(|x| x.as_ptr() as usize).unwrap_or(0)
+ ^ next.map(|x| x.as_ptr() as usize).unwrap_or(0);
+
+ let new_next = NonNull::new(new_packed as *mut _);
+ self.set_right(ptr, new_next);
+ }
+
+ #[inline]
+ unsafe fn replace_next_or_prev(
+ &mut self,
+ ptr: Self::LinkPtr,
+ old: Option<Self::LinkPtr>,
+ new: Option<Self::LinkPtr>,
+ ) {
+ let packed = self.right(ptr).map(|x| x.as_ptr() as usize).unwrap_or(0);
+ let new_packed = packed
+ ^ old.map(|x| x.as_ptr() as usize).unwrap_or(0)
+ ^ new.map(|x| x.as_ptr() as usize).unwrap_or(0);
+
+ let new_next = NonNull::new(new_packed as *mut _);
+ self.set_right(ptr, new_next);
+ }
+}
+
#[inline]
unsafe fn is_left_child<T: RBTreeOps>(link_ops: &T, ptr: T::LinkPtr, parent: T::LinkPtr) -> bool {
link_ops.left(parent) == Some(ptr)
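
`AtomicLinkOps` above packs the parent pointer and the node color into one `AtomicUsize`, relying on `#[repr(align(2))]` to keep the low bit of any `AtomicLink` address free. A self-contained sketch of that packing arithmetic, with plain `usize` values standing in for real pointers (`pack`/`unpack` are illustrative helpers, not crate API):

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Color { Red, Black }

// Store the color in the low bit; the parent address occupies the rest.
fn pack(parent: usize, color: Color) -> usize {
    debug_assert!(parent & 1 == 0, "alignment >= 2 keeps the low bit free");
    (parent & !1) | match color { Color::Red => 0, Color::Black => 1 }
}

fn unpack(packed: usize) -> (usize, Color) {
    let color = if packed & 1 == 1 { Color::Black } else { Color::Red };
    (packed & !1, color)
}

fn main() {
    let parent = 0x1000usize; // a fake 2-byte-aligned parent address
    let packed = pack(parent, Color::Black);
    assert_eq!(unpack(packed), (parent, Color::Black));
    // LINKED_DEFAULT_VALUE = 1 decodes as "no parent, colored Black":
    assert_eq!(unpack(1), (0, Color::Black));
}
```
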
diff --git a/src/singly_linked_list.rs b/src/singly_linked_list.rs
index 29feeab..fa93e10 100644
--- a/src/singly_linked_list.rs
+++ b/src/singly_linked_list.rs
@@ -10,7 +10,8 @@
use core::cell::Cell;
use core::fmt;
-use core::ptr::NonNull;
+use core::ptr::{null_mut, NonNull};
+use core::sync::atomic::{AtomicPtr, Ordering};
use crate::link_ops::{self, DefaultLinkOps};
use crate::pointer_ops::PointerOps;
@@ -230,6 +231,213 @@ unsafe impl XorLinkedListOps for LinkOps {
}
}
+// =============================================================================
+// AtomicLink
+// =============================================================================
+
+/// Intrusive link that allows an object to be inserted into a
+/// `SinglyLinkedList`. This link allows the structure to be shared between threads.
+#[repr(align(2))]
+pub struct AtomicLink {
+ next: AtomicPtr<AtomicLink>,
+}
+
+const ATOMIC_UNLINKED_MARKER: *mut AtomicLink = 1 as *mut AtomicLink;
+
+impl AtomicLink {
+ /// Creates a new `AtomicLink`.
+ #[inline]
+ pub const fn new() -> AtomicLink {
+ AtomicLink {
+ next: AtomicPtr::new(ATOMIC_UNLINKED_MARKER),
+ }
+ }
+
+ /// Checks whether the `AtomicLink` is linked into a `SinglyLinkedList`.
+ #[inline]
+ pub fn is_linked(&self) -> bool {
+ self.next.load(Ordering::Relaxed) != ATOMIC_UNLINKED_MARKER
+ }
+
+ /// Forcibly unlinks an object from a `SinglyLinkedList`.
+ ///
+ /// # Safety
+ ///
+ /// It is undefined behavior to call this function while still linked into a
+ /// `SinglyLinkedList`. The only situation where this function is useful is
+ /// after calling `fast_clear` on a `SinglyLinkedList`, since this clears
+ /// the collection without marking the nodes as unlinked.
+ #[inline]
+ pub unsafe fn force_unlink(&self) {
+ self.next.store(ATOMIC_UNLINKED_MARKER, Ordering::Release);
+ }
+
+ /// Access the `next` pointer in an exclusive context.
+ ///
+ /// # Safety
+ ///
+    /// This can only be called after `acquire_link` has been successfully called.
+ #[inline]
+ unsafe fn next_exclusive(&self) -> &Cell<Option<NonNull<AtomicLink>>> {
+        // This is safe because currently AtomicPtr<AtomicLink> has the same representation as Cell<Option<NonNull<AtomicLink>>>.
+ core::mem::transmute(&self.next)
+ }
+}
+
+impl DefaultLinkOps for AtomicLink {
+ type Ops = AtomicLinkOps;
+
+ const NEW: Self::Ops = AtomicLinkOps;
+}
+
+// An object containing a link can be sent to another thread since `acquire_link` is atomic.
+unsafe impl Send for AtomicLink {}
+
+// An object containing a link can be shared between threads since `acquire_link` is atomic.
+unsafe impl Sync for AtomicLink {}
+
+impl Clone for AtomicLink {
+ #[inline]
+ fn clone(&self) -> AtomicLink {
+ AtomicLink::new()
+ }
+}
+
+impl Default for AtomicLink {
+ #[inline]
+ fn default() -> AtomicLink {
+ AtomicLink::new()
+ }
+}
+
+// Provide an implementation of Debug so that structs containing a link can
+// still derive Debug.
+impl fmt::Debug for AtomicLink {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // There isn't anything sensible to print here except whether the link
+ // is currently in a list.
+ if self.is_linked() {
+ write!(f, "linked")
+ } else {
+ write!(f, "unlinked")
+ }
+ }
+}
+
+// =============================================================================
+// AtomicLinkOps
+// =============================================================================
+
+/// Default `AtomicLinkOps` implementation for `SinglyLinkedList`.
+#[derive(Clone, Copy, Default)]
+pub struct AtomicLinkOps;
+
+unsafe impl link_ops::LinkOps for AtomicLinkOps {
+ type LinkPtr = NonNull<AtomicLink>;
+
+ #[inline]
+ unsafe fn acquire_link(&mut self, ptr: Self::LinkPtr) -> bool {
+ ptr.as_ref()
+ .next
+ .compare_exchange(
+ ATOMIC_UNLINKED_MARKER,
+ null_mut(),
+ Ordering::Acquire,
+ Ordering::Relaxed,
+ )
+ .is_ok()
+ }
+
+ #[inline]
+ unsafe fn release_link(&mut self, ptr: Self::LinkPtr) {
+ ptr.as_ref()
+ .next
+ .store(ATOMIC_UNLINKED_MARKER, Ordering::Release)
+ }
+}
+
+unsafe impl SinglyLinkedListOps for AtomicLinkOps {
+ #[inline]
+ unsafe fn next(&self, ptr: Self::LinkPtr) -> Option<Self::LinkPtr> {
+ ptr.as_ref().next_exclusive().get()
+ }
+
+ #[inline]
+ unsafe fn set_next(&mut self, ptr: Self::LinkPtr, next: Option<Self::LinkPtr>) {
+ ptr.as_ref().next_exclusive().set(next);
+ }
+}
+
+unsafe impl XorLinkedListOps for AtomicLinkOps {
+ #[inline]
+ unsafe fn next(
+ &self,
+ ptr: Self::LinkPtr,
+ prev: Option<Self::LinkPtr>,
+ ) -> Option<Self::LinkPtr> {
+ let packed = ptr
+ .as_ref()
+ .next_exclusive()
+ .get()
+ .map(|x| x.as_ptr() as usize)
+ .unwrap_or(0);
+ let raw = packed ^ prev.map(|x| x.as_ptr() as usize).unwrap_or(0);
+
+ NonNull::new(raw as *mut _)
+ }
+
+ #[inline]
+ unsafe fn prev(
+ &self,
+ ptr: Self::LinkPtr,
+ next: Option<Self::LinkPtr>,
+ ) -> Option<Self::LinkPtr> {
+ let packed = ptr
+ .as_ref()
+ .next_exclusive()
+ .get()
+ .map(|x| x.as_ptr() as usize)
+ .unwrap_or(0);
+ let raw = packed ^ next.map(|x| x.as_ptr() as usize).unwrap_or(0);
+ NonNull::new(raw as *mut _)
+ }
+
+ #[inline]
+ unsafe fn set(
+ &mut self,
+ ptr: Self::LinkPtr,
+ prev: Option<Self::LinkPtr>,
+ next: Option<Self::LinkPtr>,
+ ) {
+ let new_packed = prev.map(|x| x.as_ptr() as usize).unwrap_or(0)
+ ^ next.map(|x| x.as_ptr() as usize).unwrap_or(0);
+
+ let new_next = NonNull::new(new_packed as *mut _);
+ ptr.as_ref().next_exclusive().set(new_next);
+ }
+
+ #[inline]
+ unsafe fn replace_next_or_prev(
+ &mut self,
+ ptr: Self::LinkPtr,
+ old: Option<Self::LinkPtr>,
+ new: Option<Self::LinkPtr>,
+ ) {
+ let packed = ptr
+ .as_ref()
+ .next_exclusive()
+ .get()
+ .map(|x| x.as_ptr() as usize)
+ .unwrap_or(0);
+ let new_packed = packed
+ ^ old.map(|x| x.as_ptr() as usize).unwrap_or(0)
+ ^ new.map(|x| x.as_ptr() as usize).unwrap_or(0);
+
+ let new_next = NonNull::new(new_packed as *mut _);
+ ptr.as_ref().next_exclusive().set(new_next);
+ }
+}
+
#[inline]
unsafe fn link_between<T: SinglyLinkedListOps>(
link_ops: &mut T,
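
A hedged usage sketch for the new atomic singly-linked-list link, exercising the `fast_clear` + `force_unlink` sequence documented above. `Node` and `NodeAdapter` are illustrative names; `intrusive_adapter!`, `UnsafeRef`, and `fast_clear` are existing crate API:

```rust
use intrusive_collections::{
    intrusive_adapter, SinglyLinkedList, SinglyLinkedListAtomicLink, UnsafeRef,
};

struct Node {
    link: SinglyLinkedListAtomicLink,
}

intrusive_adapter!(NodeAdapter = UnsafeRef<Node>: Node { link: SinglyLinkedListAtomicLink });

fn main() {
    let node = UnsafeRef::from_box(Box::new(Node {
        link: SinglyLinkedListAtomicLink::new(),
    }));
    let mut list = SinglyLinkedList::new(NodeAdapter::new());
    list.push_front(node.clone());
    assert!(node.link.is_linked());

    // O(1) clear: the list forgets its nodes without marking them unlinked.
    list.fast_clear();
    assert!(node.link.is_linked());

    // Safety: the list that contained this node has been cleared.
    unsafe { node.link.force_unlink() };
    assert!(!node.link.is_linked());

    // Safety: `node` is the only remaining reference; reclaim the allocation.
    drop(unsafe { UnsafeRef::into_box(node) });
}
```
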
diff --git a/src/xor_linked_list.rs b/src/xor_linked_list.rs
index 4e53ff4..cb0e177 100644
--- a/src/xor_linked_list.rs
+++ b/src/xor_linked_list.rs
@@ -14,6 +14,7 @@
use core::cell::Cell;
use core::fmt;
use core::ptr::NonNull;
+use core::sync::atomic::{AtomicUsize, Ordering};
use crate::link_ops::{self, DefaultLinkOps};
use crate::pointer_ops::PointerOps;
@@ -255,6 +256,197 @@ unsafe impl SinglyLinkedListOps for LinkOps {
}
}
+// =============================================================================
+// AtomicLink
+// =============================================================================
+
+/// Intrusive link that allows an object to be inserted into a
+/// `XorLinkedList`. This link allows the structure to be shared between threads.
+#[repr(align(2))]
+pub struct AtomicLink {
+ packed: AtomicUsize,
+}
+
+impl AtomicLink {
+    /// Creates a new `AtomicLink`.
+ #[inline]
+ pub const fn new() -> AtomicLink {
+ AtomicLink {
+ packed: AtomicUsize::new(UNLINKED_MARKER),
+ }
+ }
+
+    /// Checks whether the `AtomicLink` is linked into a `XorLinkedList`.
+ #[inline]
+ pub fn is_linked(&self) -> bool {
+        self.packed.load(Ordering::Relaxed) != UNLINKED_MARKER
+ }
+
+ /// Forcibly unlinks an object from a `XorLinkedList`.
+ ///
+ /// # Safety
+ ///
+ /// It is undefined behavior to call this function while still linked into a
+ /// `XorLinkedList`. The only situation where this function is useful is
+ /// after calling `fast_clear` on a `XorLinkedList`, since this clears
+ /// the collection without marking the nodes as unlinked.
+ #[inline]
+ pub unsafe fn force_unlink(&self) {
+ self.packed.store(UNLINKED_MARKER, Ordering::Release);
+ }
+
+ /// Access the `packed` pointer in an exclusive context.
+ ///
+ /// # Safety
+ ///
+    /// This can only be called after `acquire_link` has been successfully called.
+ #[inline]
+ unsafe fn packed_exclusive(&self) -> &Cell<usize> {
+        // This is safe because currently AtomicUsize has the same representation as Cell<usize>.
+ core::mem::transmute(&self.packed)
+ }
+}
+
+impl DefaultLinkOps for AtomicLink {
+ type Ops = AtomicLinkOps;
+
+ const NEW: Self::Ops = AtomicLinkOps;
+}
+
+// An object containing a link can be sent to another thread since `acquire_link` is atomic.
+unsafe impl Send for AtomicLink {}
+
+// An object containing a link can be shared between threads since `acquire_link` is atomic.
+unsafe impl Sync for AtomicLink {}
+
+impl Clone for AtomicLink {
+ #[inline]
+ fn clone(&self) -> AtomicLink {
+ AtomicLink::new()
+ }
+}
+
+impl Default for AtomicLink {
+ #[inline]
+ fn default() -> AtomicLink {
+ AtomicLink::new()
+ }
+}
+
+// Provide an implementation of Debug so that structs containing a link can
+// still derive Debug.
+impl fmt::Debug for AtomicLink {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // There isn't anything sensible to print here except whether the link
+ // is currently in a list.
+ if self.is_linked() {
+ write!(f, "linked")
+ } else {
+ write!(f, "unlinked")
+ }
+ }
+}
+
+// =============================================================================
+// AtomicLinkOps
+// =============================================================================
+
+/// Default `AtomicLinkOps` implementation for `XorLinkedList`.
+#[derive(Clone, Copy, Default)]
+pub struct AtomicLinkOps;
+
+const LINKED_DEFAULT_VALUE: usize = 0;
+
+unsafe impl link_ops::LinkOps for AtomicLinkOps {
+ type LinkPtr = NonNull<AtomicLink>;
+
+ #[inline]
+ unsafe fn acquire_link(&mut self, ptr: Self::LinkPtr) -> bool {
+ ptr.as_ref()
+ .packed
+ .compare_exchange(
+ UNLINKED_MARKER,
+ LINKED_DEFAULT_VALUE,
+ Ordering::Acquire,
+ Ordering::Relaxed,
+ )
+ .is_ok()
+ }
+
+ #[inline]
+ unsafe fn release_link(&mut self, ptr: Self::LinkPtr) {
+ ptr.as_ref()
+ .packed
+ .store(UNLINKED_MARKER, Ordering::Release)
+ }
+}
+
+unsafe impl XorLinkedListOps for AtomicLinkOps {
+ #[inline]
+ unsafe fn next(
+ &self,
+ ptr: Self::LinkPtr,
+ prev: Option<Self::LinkPtr>,
+ ) -> Option<Self::LinkPtr> {
+ let raw =
+ ptr.as_ref().packed_exclusive().get() ^ prev.map(|x| x.as_ptr() as usize).unwrap_or(0);
+ NonNull::new(raw as *mut _)
+ }
+
+ #[inline]
+ unsafe fn prev(
+ &self,
+ ptr: Self::LinkPtr,
+ next: Option<Self::LinkPtr>,
+ ) -> Option<Self::LinkPtr> {
+ let raw =
+ ptr.as_ref().packed_exclusive().get() ^ next.map(|x| x.as_ptr() as usize).unwrap_or(0);
+ NonNull::new(raw as *mut _)
+ }
+
+ #[inline]
+ unsafe fn set(
+ &mut self,
+ ptr: Self::LinkPtr,
+ prev: Option<Self::LinkPtr>,
+ next: Option<Self::LinkPtr>,
+ ) {
+ let new_packed = prev.map(|x| x.as_ptr() as usize).unwrap_or(0)
+ ^ next.map(|x| x.as_ptr() as usize).unwrap_or(0);
+ ptr.as_ref().packed_exclusive().set(new_packed);
+ }
+
+ #[inline]
+ unsafe fn replace_next_or_prev(
+ &mut self,
+ ptr: Self::LinkPtr,
+ old: Option<Self::LinkPtr>,
+ new: Option<Self::LinkPtr>,
+ ) {
+ let new_packed = ptr.as_ref().packed_exclusive().get()
+ ^ old.map(|x| x.as_ptr() as usize).unwrap_or(0)
+ ^ new.map(|x| x.as_ptr() as usize).unwrap_or(0);
+
+ ptr.as_ref().packed_exclusive().set(new_packed);
+ }
+}
+
+unsafe impl SinglyLinkedListOps for AtomicLinkOps {
+ #[inline]
+ unsafe fn next(&self, ptr: Self::LinkPtr) -> Option<Self::LinkPtr> {
+ let raw = ptr.as_ref().packed_exclusive().get();
+ NonNull::new(raw as *mut _)
+ }
+
+ #[inline]
+ unsafe fn set_next(&mut self, ptr: Self::LinkPtr, next: Option<Self::LinkPtr>) {
+ ptr.as_ref()
+ .packed_exclusive()
+ .set(next.map(|x| x.as_ptr() as usize).unwrap_or(0));
+ }
+}
+
#[inline]
unsafe fn link_between<T: XorLinkedListOps>(
link_ops: &mut T,
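
The XOR ops above store `prev ^ next` in a single word, so either neighbor can be recovered given the other. A worked sketch of that arithmetic with plain `usize` addresses (all values illustrative):

```rust
fn main() {
    let prev: usize = 0x1000; // fake neighbor addresses
    let next: usize = 0x2000;
    let packed = prev ^ next; // what `set(ptr, prev, next)` stores

    // Knowing one neighbor recovers the other:
    assert_eq!(packed ^ prev, next); // `next(ptr, prev)`
    assert_eq!(packed ^ next, prev); // `prev(ptr, next)`

    // A missing neighbor contributes 0, matching the `unwrap_or(0)` above:
    let head_packed = 0 ^ next;
    assert_eq!(head_packed ^ 0, next);

    // `replace_next_or_prev(ptr, old, new)` swaps one neighbor in place:
    let new_next: usize = 0x3000;
    let repacked = packed ^ next ^ new_next;
    assert_eq!(repacked ^ prev, new_next);
}
```
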