author     Android Build Coastguard Worker <android-build-coastguard-worker@google.com>  2022-05-11 05:06:19 +0000
committer  Android Build Coastguard Worker <android-build-coastguard-worker@google.com>  2022-05-11 05:06:19 +0000
commit     f34ed323b8fd76732357d736866f2406c566a781 (patch)
tree       3c2abb16a61cdcb6c665836da86505adbc9db9e0
parent     0249166812f2b30a4b7e29165f25c83932755809 (diff)
parent     0927990edf14645203e733b6556a968b7f4177f6 (diff)
download   hashlink-android13-mainline-media-swcodec-release.tar.gz
Change-Id: I1e1be066f720cad325c7e4ac4bcaed0e11c785e7
-rw-r--r--  .cargo_vcs_info.json        2
-rw-r--r--  .circleci/config.yml       10
-rw-r--r--  Android.bp                  9
-rw-r--r--  CHANGELOG.md                8
-rw-r--r--  Cargo.toml                  7
-rw-r--r--  Cargo.toml.orig             5
-rw-r--r--  METADATA                   10
-rw-r--r--  TEST_MAPPING               19
-rw-r--r--  cargo2android.json          4
-rw-r--r--  src/linked_hash_map.rs     34
-rw-r--r--  src/linked_hash_set.rs     10
-rw-r--r--  src/serde.rs              164
-rw-r--r--  tests/linked_hash_map.rs   15
-rw-r--r--  tests/linked_hash_set.rs   15
-rw-r--r--  tests/serde.rs             49
15 files changed, 257 insertions, 104 deletions
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
index 89ad255..3f828db 100644
--- a/.cargo_vcs_info.json
+++ b/.cargo_vcs_info.json
@@ -1,5 +1,5 @@
{
"git": {
- "sha1": "a244c741ac60333e3c37298fffdbfc1aa5f3c0d6"
+ "sha1": "491feb0b3f9805f7548a459ac32ab24914e12db2"
}
}
diff --git a/.circleci/config.yml b/.circleci/config.yml
index 9116b38..12fde18 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -35,17 +35,17 @@ jobs:
command: cargo build --all --all-targets
- run:
name: Run all tests
- command: cargo test --all
+ command: cargo test --all --all-features
- run:
name: Run all tests under miri
command: |
- cargo +nightly miri test
+ cargo +nightly miri test --all-features
- run:
name: Run all tests under sanitizers
command: |
- RUSTFLAGS="-Z sanitizer=address" cargo +nightly -Z build-std test --target x86_64-unknown-linux-gnu
- RUSTFLAGS="-Z sanitizer=leak" cargo +nightly test -Z build-std --target x86_64-unknown-linux-gnu
- RUSTFLAGS="-Z sanitizer=memory" cargo +nightly test -Z build-std --target x86_64-unknown-linux-gnu
+ RUSTFLAGS="-Z sanitizer=address" cargo +nightly -Z build-std test --target x86_64-unknown-linux-gnu --all-features
+ RUSTFLAGS="-Z sanitizer=leak" cargo +nightly test -Z build-std --target x86_64-unknown-linux-gnu --all-features
+ RUSTFLAGS="-Z sanitizer=memory" cargo +nightly test -Z build-std --target x86_64-unknown-linux-gnu --all-features
- save_cache:
paths:
- /usr/local/cargo/registry
diff --git a/Android.bp b/Android.bp
index f365f34..260ac53 100644
--- a/Android.bp
+++ b/Android.bp
@@ -1,4 +1,5 @@
-// This file is generated by cargo2android.py --device --run --dependencies.
+// This file is generated by cargo2android.py --config cargo2android.json.
+// Do not modify this file as changes will be overridden on upgrade.
package {
default_applicable_licenses: ["external_rust_crates_hashlink_license"],
@@ -40,13 +41,11 @@ rust_library {
name: "libhashlink",
host_supported: true,
crate_name: "hashlink",
+ cargo_env_compat: true,
+ cargo_pkg_version: "0.7.0",
srcs: ["src/lib.rs"],
edition: "2018",
rustlibs: [
"libhashbrown",
],
}
-
-// dependent_library ["feature_list"]
-// ahash-0.4.6
-// hashbrown-0.9.1 "ahash,default,inline-more"
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 12963ea..54c2bbe 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,11 @@
+## [0.7.0]
+- API incompatible change: depend on hashbrown 0.11, changes re-exported types.
+- Fix `LinkedHashSet::back` to take `&self` not `&mut self`.
+- API incompatible change: equality tests on `LinkedHashSet` are now *ordered*,
+ similar to `LinkedHashMap`.
+- Make the serde `Deserialize` implementations on `LinkedHashMap` and
+ `LinkedHashSet` generic on the `BuildHasher` type.
+
## [0.6.0]
- API incompatible change: depend on hashbrown 0.9, re-export renamed
hashbrown::TryReserveError type.
diff --git a/Cargo.toml b/Cargo.toml
index 6a5a2d4..92db926 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,7 +13,7 @@
[package]
edition = "2018"
name = "hashlink"
-version = "0.6.0"
+version = "0.7.0"
authors = ["kyren <kerriganw@gmail.com>"]
description = "HashMap-like containers that hold their key-value pairs in a user controllable order"
documentation = "https://docs.rs/hashlink"
@@ -22,11 +22,14 @@ keywords = ["data-structures"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/kyren/hashlink"
[dependencies.hashbrown]
-version = "0.9.0"
+version = "0.11.0"
[dependencies.serde]
version = "1.0"
optional = true
+[dev-dependencies.fxhash]
+version = "0.2.1"
+
[dev-dependencies.serde_test]
version = "1.0"
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
index 6ad661b..c6de186 100644
--- a/Cargo.toml.orig
+++ b/Cargo.toml.orig
@@ -1,6 +1,6 @@
[package]
name = "hashlink"
-version = "0.6.0"
+version = "0.7.0"
authors = ["kyren <kerriganw@gmail.com>"]
edition = "2018"
description = "HashMap-like containers that hold their key-value pairs in a user controllable order"
@@ -17,8 +17,9 @@ circle-ci = { repository = "kyren/hashlink", branch = "master" }
serde_impl = ["serde"]
[dependencies]
-hashbrown = "0.9.0"
+hashbrown = "0.11.0"
serde = { version = "1.0", optional = true }
[dev-dependencies]
serde_test = "1.0"
+fxhash = "0.2.1"
diff --git a/METADATA b/METADATA
index d38b627..1aeaa1e 100644
--- a/METADATA
+++ b/METADATA
@@ -7,13 +7,13 @@ third_party {
}
url {
type: ARCHIVE
- value: "https://static.crates.io/crates/hashlink/hashlink-0.6.0.crate"
+ value: "https://static.crates.io/crates/hashlink/hashlink-0.7.0.crate"
}
- version: "0.6.0"
+ version: "0.7.0"
license_type: NOTICE
last_upgrade_date {
- year: 2020
- month: 11
- day: 9
+ year: 2021
+ month: 5
+ day: 19
}
}
diff --git a/TEST_MAPPING b/TEST_MAPPING
new file mode 100644
index 0000000..91f37bf
--- /dev/null
+++ b/TEST_MAPPING
@@ -0,0 +1,19 @@
+// Generated by update_crate_tests.py for tests that depend on this crate.
+{
+ "presubmit": [
+ {
+ "name": "keystore2_test"
+ },
+ {
+ "name": "legacykeystore_test"
+ }
+ ],
+ "presubmit-rust": [
+ {
+ "name": "keystore2_test"
+ },
+ {
+ "name": "legacykeystore_test"
+ }
+ ]
+}
diff --git a/cargo2android.json b/cargo2android.json
new file mode 100644
index 0000000..bf78496
--- /dev/null
+++ b/cargo2android.json
@@ -0,0 +1,4 @@
+{
+ "device": true,
+ "run": true
+}
\ No newline at end of file
diff --git a/src/linked_hash_map.rs b/src/linked_hash_map.rs
index 32733ea..191844c 100644
--- a/src/linked_hash_map.rs
+++ b/src/linked_hash_map.rs
@@ -441,6 +441,40 @@ where
}
}
}
+
+ /// If an entry with this key exists, move it to the front of the list and return a reference to
+ /// the value.
+ #[inline]
+ pub fn to_front<Q>(&mut self, k: &Q) -> Option<&mut V>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ match self.raw_entry_mut().from_key(k) {
+ RawEntryMut::Occupied(mut occupied) => {
+ occupied.to_front();
+ Some(occupied.into_mut())
+ }
+ RawEntryMut::Vacant(_) => None,
+ }
+ }
+
+ /// If an entry with this key exists, move it to the back of the list and return a reference to
+ /// the value.
+ #[inline]
+ pub fn to_back<Q>(&mut self, k: &Q) -> Option<&mut V>
+ where
+ K: Borrow<Q>,
+ Q: Hash + Eq + ?Sized,
+ {
+ match self.raw_entry_mut().from_key(k) {
+ RawEntryMut::Occupied(mut occupied) => {
+ occupied.to_back();
+ Some(occupied.into_mut())
+ }
+ RawEntryMut::Vacant(_) => None,
+ }
+ }
}
impl<K, V, S> LinkedHashMap<K, V, S>
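
The two hunks above add to_front and to_back to LinkedHashMap. A minimal usage sketch (illustrative only, not part of the diff), assuming the hashlink 0.7.0 API shown above:

// Sketch: moving existing entries to either end of the internal list.
use hashlink::LinkedHashMap;

fn main() {
    let mut cache: LinkedHashMap<&str, i32> = LinkedHashMap::new();
    cache.insert("a", 1);
    cache.insert("b", 2);
    cache.insert("c", 3);

    // Move "a" to the back, e.g. to mark it most recently used in an LRU scheme,
    // and mutate its value through the returned reference.
    if let Some(value) = cache.to_back("a") {
        *value += 10;
    }
    assert_eq!(cache.back(), Some((&"a", &11)));

    // A missing key leaves the map untouched and returns None.
    assert_eq!(cache.to_front("missing"), None);
    assert_eq!(cache.front(), Some((&"b", &2)));
}

Both methods return None when the key is absent and otherwise hand back a mutable reference to the moved entry's value, which makes them convenient for LRU-style bookkeeping.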
diff --git a/src/linked_hash_set.rs b/src/linked_hash_set.rs
index 1ab7dbb..f55f6c5 100644
--- a/src/linked_hash_set.rs
+++ b/src/linked_hash_set.rs
@@ -250,7 +250,7 @@ where
}
#[inline]
- pub fn back(&mut self) -> Option<&T> {
+ pub fn back(&self) -> Option<&T> {
self.map.back().map(|(k, _)| k)
}
@@ -304,12 +304,8 @@ where
S: BuildHasher,
{
#[inline]
- fn eq(&self, other: &LinkedHashSet<T, S>) -> bool {
- if self.len() != other.len() {
- return false;
- }
-
- self.iter().all(|key| other.contains(key))
+ fn eq(&self, other: &Self) -> bool {
+ self.len() == other.len() && self.iter().eq(other)
}
}
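
The linked_hash_set.rs changes above relax back() to take &self and make PartialEq order-sensitive (matching LinkedHashMap, per the CHANGELOG entry). An illustrative sketch of the new behavior (not part of the diff):

// Sketch: equality now compares iteration order, not just membership.
use hashlink::LinkedHashSet;

fn main() {
    let a: LinkedHashSet<i32> = [1, 2, 3].iter().copied().collect();
    let b: LinkedHashSet<i32> = [3, 2, 1].iter().copied().collect();

    // Same elements, different insertion order: unequal under the new PartialEq.
    assert_ne!(a, b);

    // back() now takes &self, so it works through a shared reference.
    let shared: &LinkedHashSet<i32> = &a;
    assert_eq!(shared.back(), Some(&3));
}

Callers that relied on the old, membership-only equality need to account for ordering after this upgrade.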
diff --git a/src/serde.rs b/src/serde.rs
index b8e307c..f44ebb3 100644
--- a/src/serde.rs
+++ b/src/serde.rs
@@ -31,54 +31,59 @@ where
}
}
-#[derive(Debug)]
-pub struct LinkedHashMapVisitor<K, V> {
- marker: PhantomData<LinkedHashMap<K, V>>,
-}
-
-impl<K, V> LinkedHashMapVisitor<K, V> {
- fn new() -> Self {
- LinkedHashMapVisitor {
- marker: PhantomData,
- }
- }
-}
-
-impl<K, V> Default for LinkedHashMapVisitor<K, V> {
- fn default() -> Self {
- Self::new()
- }
-}
-
-impl<'de, K, V> Visitor<'de> for LinkedHashMapVisitor<K, V>
+impl<'de, K, V, S> Deserialize<'de> for LinkedHashMap<K, V, S>
where
K: Deserialize<'de> + Eq + Hash,
V: Deserialize<'de>,
+ S: BuildHasher + Default,
{
- type Value = LinkedHashMap<K, V>;
-
- fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
- write!(formatter, "a map")
- }
+ fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
+ #[derive(Debug)]
+ pub struct LinkedHashMapVisitor<K, V, S> {
+ marker: PhantomData<LinkedHashMap<K, V, S>>,
+ }
- #[inline]
- fn visit_map<M: MapAccess<'de>>(self, mut map: M) -> Result<Self::Value, M::Error> {
- let mut values = LinkedHashMap::with_capacity(map.size_hint().unwrap_or(0));
+ impl<K, V, S> LinkedHashMapVisitor<K, V, S> {
+ fn new() -> Self {
+ LinkedHashMapVisitor {
+ marker: PhantomData,
+ }
+ }
+ }
- while let Some((k, v)) = map.next_entry()? {
- values.insert(k, v);
+ impl<K, V, S> Default for LinkedHashMapVisitor<K, V, S> {
+ fn default() -> Self {
+ Self::new()
+ }
}
- Ok(values)
- }
-}
+ impl<'de, K, V, S> Visitor<'de> for LinkedHashMapVisitor<K, V, S>
+ where
+ K: Deserialize<'de> + Eq + Hash,
+ V: Deserialize<'de>,
+ S: BuildHasher + Default,
+ {
+ type Value = LinkedHashMap<K, V, S>;
+
+ fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
+ write!(formatter, "a map")
+ }
+
+ #[inline]
+ fn visit_map<M: MapAccess<'de>>(self, mut map: M) -> Result<Self::Value, M::Error> {
+ let mut values = LinkedHashMap::with_capacity_and_hasher(
+ map.size_hint().unwrap_or(0),
+ S::default(),
+ );
+
+ while let Some((k, v)) = map.next_entry()? {
+ values.insert(k, v);
+ }
+
+ Ok(values)
+ }
+ }
-impl<'de, K, V> Deserialize<'de> for LinkedHashMap<K, V>
-where
- K: Deserialize<'de> + Eq + Hash,
- V: Deserialize<'de>,
-{
- fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
deserializer.deserialize_map(LinkedHashMapVisitor::default())
}
}
@@ -100,52 +105,57 @@ where
}
}
-#[derive(Debug)]
-pub struct LinkedHashSetVisitor<T> {
- marker: PhantomData<LinkedHashSet<T>>,
-}
-
-impl<T> LinkedHashSetVisitor<T> {
- fn new() -> Self {
- LinkedHashSetVisitor {
- marker: PhantomData,
- }
- }
-}
-
-impl<T> Default for LinkedHashSetVisitor<T> {
- fn default() -> Self {
- Self::new()
- }
-}
-
-impl<'de, T> Visitor<'de> for LinkedHashSetVisitor<T>
+impl<'de, T, S> Deserialize<'de> for LinkedHashSet<T, S>
where
T: Deserialize<'de> + Eq + Hash,
+ S: BuildHasher + Default,
{
- type Value = LinkedHashSet<T>;
-
- fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
- write!(formatter, "a sequence")
- }
+ fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
+ #[derive(Debug)]
+ pub struct LinkedHashSetVisitor<T, S> {
+ marker: PhantomData<LinkedHashSet<T, S>>,
+ }
- #[inline]
- fn visit_seq<S: SeqAccess<'de>>(self, mut seq: S) -> Result<Self::Value, S::Error> {
- let mut values = LinkedHashSet::with_capacity(seq.size_hint().unwrap_or(0));
+ impl<T, S> LinkedHashSetVisitor<T, S> {
+ fn new() -> Self {
+ LinkedHashSetVisitor {
+ marker: PhantomData,
+ }
+ }
+ }
- while let Some(v) = seq.next_element()? {
- values.insert(v);
+ impl<T, S> Default for LinkedHashSetVisitor<T, S> {
+ fn default() -> Self {
+ Self::new()
+ }
}
- Ok(values)
- }
-}
+ impl<'de, T, S> Visitor<'de> for LinkedHashSetVisitor<T, S>
+ where
+ T: Deserialize<'de> + Eq + Hash,
+ S: BuildHasher + Default,
+ {
+ type Value = LinkedHashSet<T, S>;
+
+ fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
+ write!(formatter, "a sequence")
+ }
+
+ #[inline]
+ fn visit_seq<SA: SeqAccess<'de>>(self, mut seq: SA) -> Result<Self::Value, SA::Error> {
+ let mut values = LinkedHashSet::with_capacity_and_hasher(
+ seq.size_hint().unwrap_or(0),
+ S::default(),
+ );
+
+ while let Some(v) = seq.next_element()? {
+ values.insert(v);
+ }
+
+ Ok(values)
+ }
+ }
-impl<'de, T> Deserialize<'de> for LinkedHashSet<T>
-where
- T: Deserialize<'de> + Eq + Hash,
-{
- fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
deserializer.deserialize_seq(LinkedHashSetVisitor::default())
}
}
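
The serde.rs rewrite above moves the visitor types inside deserialize and makes the Deserialize impls generic over S: BuildHasher + Default. An illustrative sketch of what this enables for a map with a non-default hasher (not part of the diff; assumes serde_json and fxhash as extra dependencies and the crate's serde_impl feature enabled):

// Sketch: deserializing into a LinkedHashMap with a custom BuildHasher,
// which previously only worked for the default hasher.
use fxhash::FxBuildHasher;
use hashlink::LinkedHashMap;

fn main() -> Result<(), serde_json::Error> {
    let json = r#"{"a": 1, "b": 2, "c": 3}"#;

    // S = FxBuildHasher satisfies the new BuildHasher + Default bound.
    let map: LinkedHashMap<String, i32, FxBuildHasher> = serde_json::from_str(json)?;

    // Entries keep the order in which they were deserialized.
    let keys: Vec<&str> = map.keys().map(|k| k.as_str()).collect();
    assert_eq!(keys, ["a", "b", "c"]);
    Ok(())
}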
diff --git a/tests/linked_hash_map.rs b/tests/linked_hash_map.rs
index 19dcc00..fbd3d2e 100644
--- a/tests/linked_hash_map.rs
+++ b/tests/linked_hash_map.rs
@@ -496,3 +496,18 @@ fn test_retain() {
drop(map);
assert!(c.get() == 4);
}
+
+#[test]
+fn test_order_equality() {
+ let xs = [1, 2, 3, 4, 5, 6];
+ let mut map1: LinkedHashMap<String, i32> = xs.iter().map(|i| (i.to_string(), *i)).collect();
+ let mut map2: LinkedHashMap<String, i32> = xs.iter().map(|i| (i.to_string(), *i)).collect();
+
+ assert_eq!(map1, map2);
+
+ map1.to_front("4");
+ assert_ne!(map1, map2);
+
+ map2.to_front("4");
+ assert_eq!(map1, map2);
+}
diff --git a/tests/linked_hash_set.rs b/tests/linked_hash_set.rs
index 13cceae..cb75887 100644
--- a/tests/linked_hash_set.rs
+++ b/tests/linked_hash_set.rs
@@ -510,3 +510,18 @@ fn to_back_front_order() {
set.to_front(&3);
assert_eq!(set.front().copied(), Some(3));
}
+
+#[test]
+fn test_order_equality() {
+ let xs = [1, 2, 3, 4, 5, 6];
+ let mut set1: LinkedHashSet<i32> = xs.iter().copied().collect();
+ let mut set2: LinkedHashSet<i32> = xs.iter().copied().collect();
+
+ assert_eq!(set1, set2);
+
+ set1.to_front(&4);
+ assert_ne!(set1, set2);
+
+ set2.to_front(&4);
+ assert_eq!(set1, set2);
+}
diff --git a/tests/serde.rs b/tests/serde.rs
index fce3108..d397a24 100644
--- a/tests/serde.rs
+++ b/tests/serde.rs
@@ -1,5 +1,6 @@
#![cfg(feature = "serde_impl")]
+use fxhash::FxBuildHasher;
use hashlink::{LinkedHashMap, LinkedHashSet};
use serde_test::{assert_tokens, Token};
@@ -33,6 +34,35 @@ fn map_serde_tokens() {
}
#[test]
+fn map_serde_tokens_empty_generic() {
+ let map = LinkedHashMap::<char, u32, FxBuildHasher>::with_hasher(FxBuildHasher::default());
+
+ assert_tokens(&map, &[Token::Map { len: Some(0) }, Token::MapEnd]);
+}
+
+#[test]
+fn map_serde_tokens_generic() {
+ let mut map = LinkedHashMap::with_hasher(FxBuildHasher::default());
+ map.insert('a', 10);
+ map.insert('b', 20);
+ map.insert('c', 30);
+
+ assert_tokens(
+ &map,
+ &[
+ Token::Map { len: Some(3) },
+ Token::Char('a'),
+ Token::I32(10),
+ Token::Char('b'),
+ Token::I32(20),
+ Token::Char('c'),
+ Token::I32(30),
+ Token::MapEnd,
+ ],
+ );
+}
+
+#[test]
fn set_serde_tokens_empty() {
let set = LinkedHashSet::<u32>::new();
@@ -57,3 +87,22 @@ fn set_serde_tokens() {
],
);
}
+
+#[test]
+fn set_serde_tokens_generic() {
+ let mut set = LinkedHashSet::with_hasher(FxBuildHasher::default());
+ set.insert('a');
+ set.insert('b');
+ set.insert('c');
+
+ assert_tokens(
+ &set,
+ &[
+ Token::Seq { len: Some(3) },
+ Token::Char('a'),
+ Token::Char('b'),
+ Token::Char('c'),
+ Token::SeqEnd,
+ ],
+ );
+}