author     Joel Galenson <jgalenson@google.com>  2021-05-20 13:49:13 +0000
committer  Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>  2021-05-20 13:49:13 +0000
commit     dd73a2429bde3bdde0b7aa93bed55037442c8131 (patch)
tree       4c96b5958596e7a1c93876d817da76c568e20ddd
parent     9ec1282509e4fa762f8c29f81d3e8ea52d4b7b9f (diff)
parent     4ec4df44521b2a3b0dd8b643ff1f3bb5be5e2c62 (diff)
download   intrusive-collections-dd73a2429bde3bdde0b7aa93bed55037442c8131.tar.gz
Upgrade rust/crates/intrusive-collections to 0.9.1 am: 99d7a5188a am: 3cf06e04e5 am: 6c6796b9a7 am: 4ec4df4452
Original change: https://android-review.googlesource.com/c/platform/external/rust/crates/intrusive-collections/+/1712909

Change-Id: Idf41267541bc0266e16b9b4e8b90e93bf10a6920
-rw-r--r--  .cargo_vcs_info.json     2
-rw-r--r--  .travis.yml              2
-rw-r--r--  Cargo.toml               2
-rw-r--r--  Cargo.toml.orig          2
-rw-r--r--  METADATA                10
-rw-r--r--  TEST_MAPPING            11
-rw-r--r--  src/lib.rs               2
-rw-r--r--  src/pointer_ops.rs     197
-rw-r--r--  src/xor_linked_list.rs  35
9 files changed, 253 insertions, 10 deletions
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
index c5f55b1..b1ec331 100644
--- a/.cargo_vcs_info.json
+++ b/.cargo_vcs_info.json
@@ -1,5 +1,5 @@
{
"git": {
- "sha1": "1cc39624ea4e69f4741f9179e780eecc763423d6"
+ "sha1": "dc12677569f654e54bf274ffeefb615095d445c1"
}
}
diff --git a/.travis.yml b/.travis.yml
index 1ebea6e..93007e1 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -12,7 +12,7 @@ rust:
- nightly
- beta
- stable
-- 1.36.0
+- 1.39.0
before_script:
- |
diff --git a/Cargo.toml b/Cargo.toml
index 08ed56d..7c2cbdf 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,7 +13,7 @@
[package]
edition = "2018"
name = "intrusive-collections"
-version = "0.9.0"
+version = "0.9.1"
authors = ["Amanieu d'Antras <amanieu@gmail.com>"]
description = "Intrusive collections for Rust (linked list and red-black tree)"
documentation = "https://docs.rs/intrusive-collections"
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
index ffa9b83..3df8f20 100644
--- a/Cargo.toml.orig
+++ b/Cargo.toml.orig
@@ -1,6 +1,6 @@
[package]
name = "intrusive-collections"
-version = "0.9.0"
+version = "0.9.1"
authors = ["Amanieu d'Antras <amanieu@gmail.com>"]
description = "Intrusive collections for Rust (linked list and red-black tree)"
documentation = "https://docs.rs/intrusive-collections"
diff --git a/METADATA b/METADATA
index ffc6a09..f1f5d9b 100644
--- a/METADATA
+++ b/METADATA
@@ -7,13 +7,13 @@ third_party {
}
url {
type: ARCHIVE
- value: "https://static.crates.io/crates/intrusive-collections/intrusive-collections-0.9.0.crate"
+ value: "https://static.crates.io/crates/intrusive-collections/intrusive-collections-0.9.1.crate"
}
- version: "0.9.0"
+ version: "0.9.1"
license_type: NOTICE
last_upgrade_date {
- year: 2020
- month: 12
- day: 15
+ year: 2021
+ month: 5
+ day: 19
}
}
diff --git a/TEST_MAPPING b/TEST_MAPPING
new file mode 100644
index 0000000..d859886
--- /dev/null
+++ b/TEST_MAPPING
@@ -0,0 +1,11 @@
+// Generated by update_crate_tests.py for tests that depend on this crate.
+{
+ "presubmit": [
+ {
+ "name": "ZipFuseTest"
+ },
+ {
+ "name": "authfs_device_test_src_lib"
+ }
+ ]
+}
diff --git a/src/lib.rs b/src/lib.rs
index 928b337..b873910 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -267,7 +267,7 @@
#![warn(missing_docs)]
#![warn(rust_2018_idioms)]
#![no_std]
-#![cfg_attr(feature = "nightly", feature(const_fn))]
+#![cfg_attr(feature = "nightly", feature(const_fn_trait_bound))]
#![allow(clippy::declare_interior_mutable_const, clippy::collapsible_if)]
#[cfg(feature = "alloc")]
diff --git a/src/pointer_ops.rs b/src/pointer_ops.rs
index 74285ef..716ac15 100644
--- a/src/pointer_ops.rs
+++ b/src/pointer_ops.rs
@@ -15,6 +15,7 @@ use crate::UnsafeRef;
use core::marker::PhantomData;
use core::mem::ManuallyDrop;
use core::ops::Deref;
+use core::pin::Pin;
/// Trait for pointer conversion operations.
///
@@ -84,6 +85,21 @@ unsafe impl<'a, T: ?Sized> PointerOps for DefaultPointerOps<&'a T> {
}
}
+unsafe impl<'a, T: ?Sized> PointerOps for DefaultPointerOps<Pin<&'a T>> {
+ type Value = T;
+ type Pointer = Pin<&'a T>;
+
+ #[inline]
+ unsafe fn from_raw(&self, raw: *const T) -> Pin<&'a T> {
+ Pin::new_unchecked(&*raw)
+ }
+
+ #[inline]
+ fn into_raw(&self, ptr: Pin<&'a T>) -> *const T {
+ unsafe { Pin::into_inner_unchecked(ptr) as *const T }
+ }
+}
+
unsafe impl<T: ?Sized> PointerOps for DefaultPointerOps<UnsafeRef<T>> {
type Value = T;
type Pointer = UnsafeRef<T>;
@@ -99,6 +115,21 @@ unsafe impl<T: ?Sized> PointerOps for DefaultPointerOps<UnsafeRef<T>> {
}
}
+unsafe impl<T: ?Sized> PointerOps for DefaultPointerOps<Pin<UnsafeRef<T>>> {
+ type Value = T;
+ type Pointer = Pin<UnsafeRef<T>>;
+
+ #[inline]
+ unsafe fn from_raw(&self, raw: *const T) -> Pin<UnsafeRef<T>> {
+ Pin::new_unchecked(UnsafeRef::from_raw(raw as *mut T))
+ }
+
+ #[inline]
+ fn into_raw(&self, ptr: Pin<UnsafeRef<T>>) -> *const T {
+ UnsafeRef::into_raw(unsafe { Pin::into_inner_unchecked(ptr) }) as *const T
+ }
+}
+
#[cfg(feature = "alloc")]
unsafe impl<T: ?Sized> PointerOps for DefaultPointerOps<Box<T>> {
type Value = T;
@@ -116,6 +147,22 @@ unsafe impl<T: ?Sized> PointerOps for DefaultPointerOps<Box<T>> {
}
#[cfg(feature = "alloc")]
+unsafe impl<T: ?Sized> PointerOps for DefaultPointerOps<Pin<Box<T>>> {
+ type Value = T;
+ type Pointer = Pin<Box<T>>;
+
+ #[inline]
+ unsafe fn from_raw(&self, raw: *const T) -> Pin<Box<T>> {
+ Pin::new_unchecked(Box::from_raw(raw as *mut T))
+ }
+
+ #[inline]
+ fn into_raw(&self, ptr: Pin<Box<T>>) -> *const T {
+ Box::into_raw(unsafe { Pin::into_inner_unchecked(ptr) }) as *const T
+ }
+}
+
+#[cfg(feature = "alloc")]
unsafe impl<T: ?Sized> PointerOps for DefaultPointerOps<Rc<T>> {
type Value = T;
type Pointer = Rc<T>;
@@ -132,6 +179,22 @@ unsafe impl<T: ?Sized> PointerOps for DefaultPointerOps<Rc<T>> {
}
#[cfg(feature = "alloc")]
+unsafe impl<T: ?Sized> PointerOps for DefaultPointerOps<Pin<Rc<T>>> {
+ type Value = T;
+ type Pointer = Pin<Rc<T>>;
+
+ #[inline]
+ unsafe fn from_raw(&self, raw: *const T) -> Pin<Rc<T>> {
+ Pin::new_unchecked(Rc::from_raw(raw))
+ }
+
+ #[inline]
+ fn into_raw(&self, ptr: Pin<Rc<T>>) -> *const T {
+ Rc::into_raw(unsafe { Pin::into_inner_unchecked(ptr) })
+ }
+}
+
+#[cfg(feature = "alloc")]
unsafe impl<T: ?Sized> PointerOps for DefaultPointerOps<Arc<T>> {
type Value = T;
type Pointer = Arc<T>;
@@ -147,6 +210,22 @@ unsafe impl<T: ?Sized> PointerOps for DefaultPointerOps<Arc<T>> {
}
}
+#[cfg(feature = "alloc")]
+unsafe impl<T: ?Sized> PointerOps for DefaultPointerOps<Pin<Arc<T>>> {
+ type Value = T;
+ type Pointer = Pin<Arc<T>>;
+
+ #[inline]
+ unsafe fn from_raw(&self, raw: *const T) -> Pin<Arc<T>> {
+ Pin::new_unchecked(Arc::from_raw(raw))
+ }
+
+ #[inline]
+ fn into_raw(&self, ptr: Pin<Arc<T>>) -> *const T {
+ Arc::into_raw(unsafe { Pin::into_inner_unchecked(ptr) })
+ }
+}
+
/// Clones a `PointerOps::Pointer` from a `*const PointerOps::Value`
///
/// This method is only safe to call if the raw pointer is known to be
@@ -192,6 +271,7 @@ mod tests {
use std::boxed::Box;
use std::fmt::Debug;
use std::mem;
+ use std::pin::Pin;
use std::rc::Rc;
use std::sync::Arc;
@@ -311,4 +391,121 @@ mod tests {
assert_eq!(2, Rc::strong_count(&p2));
}
}
+
+ #[test]
+ fn test_pin_box() {
+ unsafe {
+ let pointer_ops = DefaultPointerOps::<Pin<Box<_>>>::new();
+ let p = Pin::new(Box::new(1));
+ let a: *const i32 = &*p;
+ let r = pointer_ops.into_raw(p);
+ assert_eq!(a, r);
+ let p2: Pin<Box<i32>> = pointer_ops.from_raw(r);
+ let a2: *const i32 = &*p2;
+ assert_eq!(a, a2);
+ }
+ }
+
+ #[test]
+ fn test_pin_rc() {
+ unsafe {
+ let pointer_ops = DefaultPointerOps::<Pin<Rc<_>>>::new();
+ let p = Pin::new(Rc::new(1));
+ let a: *const i32 = &*p;
+ let r = pointer_ops.into_raw(p);
+ assert_eq!(a, r);
+ let p2: Pin<Rc<i32>> = pointer_ops.from_raw(r);
+ let a2: *const i32 = &*p2;
+ assert_eq!(a, a2);
+ }
+ }
+
+ #[test]
+ fn test_pin_arc() {
+ unsafe {
+ let pointer_ops = DefaultPointerOps::<Pin<Arc<_>>>::new();
+ let p = Pin::new(Arc::new(1));
+ let a: *const i32 = &*p;
+ let r = pointer_ops.into_raw(p);
+ assert_eq!(a, r);
+ let p2: Pin<Arc<i32>> = pointer_ops.from_raw(r);
+ let a2: *const i32 = &*p2;
+ assert_eq!(a, a2);
+ }
+ }
+
+ #[test]
+ fn test_pin_box_unsized() {
+ unsafe {
+ let pointer_ops = DefaultPointerOps::<Pin<Box<_>>>::new();
+ let p = Pin::new(Box::new(1)) as Pin<Box<dyn Debug>>;
+ let a: *const dyn Debug = &*p;
+ let b: (usize, usize) = mem::transmute(a);
+ let r = pointer_ops.into_raw(p);
+ assert_eq!(a, r);
+ assert_eq!(b, mem::transmute(r));
+ let p2: Pin<Box<dyn Debug>> = pointer_ops.from_raw(r);
+ let a2: *const dyn Debug = &*p2;
+ assert_eq!(a, a2);
+ assert_eq!(b, mem::transmute(a2));
+ }
+ }
+
+ #[test]
+ fn test_pin_rc_unsized() {
+ unsafe {
+ let pointer_ops = DefaultPointerOps::<Pin<Rc<_>>>::new();
+ let p = Pin::new(Rc::new(1)) as Pin<Rc<dyn Debug>>;
+ let a: *const dyn Debug = &*p;
+ let b: (usize, usize) = mem::transmute(a);
+ let r = pointer_ops.into_raw(p);
+ assert_eq!(a, r);
+ assert_eq!(b, mem::transmute(r));
+ let p2: Pin<Rc<dyn Debug>> = pointer_ops.from_raw(r);
+ let a2: *const dyn Debug = &*p2;
+ assert_eq!(a, a2);
+ assert_eq!(b, mem::transmute(a2));
+ }
+ }
+
+ #[test]
+ fn test_pin_arc_unsized() {
+ unsafe {
+ let pointer_ops = DefaultPointerOps::<Pin<Arc<_>>>::new();
+ let p = Pin::new(Arc::new(1)) as Pin<Arc<dyn Debug>>;
+ let a: *const dyn Debug = &*p;
+ let b: (usize, usize) = mem::transmute(a);
+ let r = pointer_ops.into_raw(p);
+ assert_eq!(a, r);
+ assert_eq!(b, mem::transmute(r));
+ let p2: Pin<Arc<dyn Debug>> = pointer_ops.from_raw(r);
+ let a2: *const dyn Debug = &*p2;
+ assert_eq!(a, a2);
+ assert_eq!(b, mem::transmute(a2));
+ }
+ }
+
+ #[test]
+ fn clone_pin_arc_from_raw() {
+ use super::clone_pointer_from_raw;
+ unsafe {
+ let pointer_ops = DefaultPointerOps::<Pin<Arc<_>>>::new();
+ let p = Pin::new(Arc::new(1));
+ let raw = &*p as *const i32;
+ let p2: Pin<Arc<i32>> = clone_pointer_from_raw(&pointer_ops, raw);
+ assert_eq!(2, Arc::strong_count(&Pin::into_inner(p2)));
+ }
+ }
+
+ #[test]
+ fn clone_pin_rc_from_raw() {
+ use super::clone_pointer_from_raw;
+ unsafe {
+ let pointer_ops = DefaultPointerOps::<Pin<Rc<_>>>::new();
+ let p = Pin::new(Rc::new(1));
+ let raw = &*p as *const i32;
+ let p2: Pin<Rc<i32>> = clone_pointer_from_raw(&pointer_ops, raw);
+ assert_eq!(2, Rc::strong_count(&Pin::into_inner(p2)));
+ }
+ }
}
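
Taken together, the new `PointerOps` impls in pointer_ops.rs let the collections own elements through pinned pointers (`Pin<&T>`, `Pin<UnsafeRef<T>>`, `Pin<Box<T>>`, `Pin<Rc<T>>`, `Pin<Arc<T>>`). A minimal usage sketch follows; the `Node` and `NodeAdapter` names are illustrative assumptions, and it relies on the crate's existing `intrusive_adapter!` macro and `LinkedList`, which are not touched by this diff.

```rust
// Sketch only: storing pinned boxes in an intrusive list, enabled by the
// DefaultPointerOps<Pin<Box<T>>> impl added in 0.9.1.
use intrusive_collections::{intrusive_adapter, LinkedList, LinkedListLink};
use std::pin::Pin;

struct Node {
    link: LinkedListLink,
    value: i32,
}

// Pin<Box<Node>> as the owning pointer type for the adapter.
intrusive_adapter!(NodeAdapter = Pin<Box<Node>>: Node { link: LinkedListLink });

fn main() {
    let mut list = LinkedList::new(NodeAdapter::new());
    list.push_back(Box::pin(Node { link: LinkedListLink::new(), value: 1 }));
    list.push_back(Box::pin(Node { link: LinkedListLink::new(), value: 2 }));

    // Iteration borrows the nodes; ownership stays pinned inside the list.
    assert_eq!(list.iter().map(|n| n.value).collect::<Vec<_>>(), [1, 2]);

    // Removing an element hands the pinned pointer back to the caller.
    let first: Pin<Box<Node>> = list.pop_front().unwrap();
    assert_eq!(first.value, 1);
}
```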
diff --git a/src/xor_linked_list.rs b/src/xor_linked_list.rs
index e0e4f95..4e53ff4 100644
--- a/src/xor_linked_list.rs
+++ b/src/xor_linked_list.rs
@@ -1218,6 +1218,14 @@ where
pub fn pop_back(&mut self) -> Option<<A::PointerOps as PointerOps>::Pointer> {
self.back_mut().remove()
}
+
+ /// Reverses the list in-place.
+ ///
+ /// Due to the structure of `XorLinkedList`, this operation is O(1).
+ #[inline]
+ pub fn reverse(&mut self) {
+ core::mem::swap(&mut self.head, &mut self.tail);
+ }
}
// Allow read-only access to values from multiple threads
@@ -1814,6 +1822,33 @@ mod tests {
}
#[test]
+ fn test_reverse() {
+ let mut l = XorLinkedList::new(ObjAdapter1::new());
+
+ l.push_back(make_obj(1));
+ l.push_back(make_obj(2));
+ l.push_back(make_obj(3));
+ l.push_back(make_obj(4));
+ assert_eq!(l.iter().map(|x| x.value).collect::<Vec<_>>(), [1, 2, 3, 4]);
+
+ l.reverse();
+ assert_eq!(l.iter().map(|x| x.value).collect::<Vec<_>>(), [4, 3, 2, 1]);
+
+ l.push_back(make_obj(5));
+ l.push_back(make_obj(6));
+ assert_eq!(
+ l.iter().map(|x| x.value).collect::<Vec<_>>(),
+ [4, 3, 2, 1, 5, 6]
+ );
+
+ l.reverse();
+ assert_eq!(
+ l.iter().map(|x| x.value).collect::<Vec<_>>(),
+ [6, 5, 1, 2, 3, 4]
+ );
+ }
+
+ #[test]
fn test_non_static() {
#[derive(Clone)]
struct Obj<'a, T> {
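
For context on the `reverse()` addition above: each node of a `XorLinkedList` stores the XOR of its neighbours' addresses rather than separate prev/next pointers, so the same links can be walked in either direction and reversing only needs to swap the list's head and tail. A hedged usage sketch follows; the `Entry` and `EntryAdapter` names and the `Box` pointer type are illustrative assumptions, not code from the diff.

```rust
// Sketch only: exercising XorLinkedList::reverse() added in 0.9.1.
use intrusive_collections::{intrusive_adapter, XorLinkedList, XorLinkedListLink};

struct Entry {
    link: XorLinkedListLink,
    value: u32,
}

intrusive_adapter!(EntryAdapter = Box<Entry>: Entry { link: XorLinkedListLink });

fn main() {
    let mut list = XorLinkedList::new(EntryAdapter::new());
    for value in 1..=4 {
        list.push_back(Box::new(Entry { link: XorLinkedListLink::new(), value }));
    }

    // O(1): only the head and tail pointers are swapped; each node's packed
    // prev ^ next link reads the same in either direction, so no per-node
    // work is required.
    list.reverse();
    assert_eq!(list.iter().map(|e| e.value).collect::<Vec<_>>(), [4, 3, 2, 1]);
}
```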