author    Jeff Vander Stoep <jeffv@google.com>    2024-02-05 23:53:39 +0000
committer Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>    2024-02-05 23:53:39 +0000
commit    a420137dc48e96c90d2e846f2741de771c1aee60 (patch)
tree      441740faf26afb9045263f16a15c9519476b5911
parent    d97a0370cf221aaf59c9d9f5060f2861ea0e479a (diff)
parent    57c1b1d01866400095be731b221b81d275d6fa89 (diff)
Upgrade serde_test to 1.0.176 am: 57c1b1d018 (HEAD, master, main)
Original change: https://android-review.googlesource.com/c/platform/external/rust/crates/serde_test/+/2949320

Change-Id: Ie67e484609bc1fb034c1318824ac640b53f39a25
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
-rw-r--r--  .cargo_vcs_info.json        4
-rw-r--r--  .github/workflows/ci.yml   60
-rw-r--r--  .gitignore                  2
-rw-r--r--  Android.bp                  4
-rw-r--r--  Cargo.toml                 27
-rw-r--r--  Cargo.toml.orig            18
-rw-r--r--  METADATA                   25
-rw-r--r--  README.md                 148
-rw-r--r--  build.rs                   50
-rw-r--r--  crates-io.md               65
-rw-r--r--  src/assert.rs             102
-rw-r--r--  src/configure.rs           16
-rw-r--r--  src/de.rs                 204
-rw-r--r--  src/error.rs                3
-rw-r--r--  src/lib.rs                 86
-rw-r--r--  src/ser.rs                 31
-rw-r--r--  src/token.rs              292
17 files changed, 527 insertions, 610 deletions
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
index e4f82e8..692cec1 100644
--- a/.cargo_vcs_info.json
+++ b/.cargo_vcs_info.json
@@ -1,6 +1,6 @@
{
"git": {
- "sha1": "e3058105f0b1a64018577b12ea19cd255644a17b"
+ "sha1": "d1294b3ad549874d5035752ac62b4eb75cee5060"
},
- "path_in_vcs": "serde_test"
+ "path_in_vcs": ""
}
\ No newline at end of file
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..b70309a
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,60 @@
+name: CI
+
+on:
+ push:
+ pull_request:
+ workflow_dispatch:
+ schedule: [cron: "40 1 * * *"]
+
+permissions:
+ contents: read
+
+env:
+ RUSTFLAGS: -Dwarnings
+
+jobs:
+ test:
+ name: Rust ${{matrix.rust}}
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ rust: [stable, beta, nightly, 1.56.0]
+ timeout-minutes: 45
+ steps:
+ - uses: actions/checkout@v3
+ - uses: dtolnay/rust-toolchain@master
+ with:
+ toolchain: ${{matrix.rust}}
+ - run: cargo build
+ - run: cargo test --features serde/derive,serde/rc
+
+ minimal:
+ name: Minimal versions
+ runs-on: ubuntu-latest
+ timeout-minutes: 45
+ steps:
+ - uses: actions/checkout@v3
+ - uses: dtolnay/rust-toolchain@nightly
+ - run: cargo generate-lockfile -Z minimal-versions
+ - run: cargo check --locked
+
+ clippy:
+ name: Clippy
+ runs-on: ubuntu-latest
+ if: github.event_name != 'pull_request'
+ timeout-minutes: 45
+ steps:
+ - uses: actions/checkout@v3
+ - uses: dtolnay/rust-toolchain@clippy
+ - run: cargo clippy -- -Dclippy::all -Dclippy::pedantic
+
+ outdated:
+ name: Outdated
+ runs-on: ubuntu-latest
+ if: github.event_name != 'pull_request'
+ timeout-minutes: 45
+ steps:
+ - uses: actions/checkout@v3
+ - uses: dtolnay/install@cargo-outdated
+ - run: cargo outdated --workspace --exit-code 1
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..4fffb2f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+/target
+/Cargo.lock
diff --git a/Android.bp b/Android.bp
index 179cac3..797dd83 100644
--- a/Android.bp
+++ b/Android.bp
@@ -43,9 +43,9 @@ rust_library {
host_supported: true,
crate_name: "serde_test",
cargo_env_compat: true,
- cargo_pkg_version: "1.0.158",
+ cargo_pkg_version: "1.0.176",
srcs: ["src/lib.rs"],
- edition: "2015",
+ edition: "2021",
rustlibs: ["libserde"],
apex_available: [
"//apex_available:platform",
diff --git a/Cargo.toml b/Cargo.toml
index 7b22d03..c0a6c39 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -10,26 +10,17 @@
# See Cargo.toml.orig for the original contents.
[package]
-rust-version = "1.19"
+edition = "2021"
+rust-version = "1.56"
name = "serde_test"
-version = "1.0.158"
+version = "1.0.176"
authors = [
"Erick Tryzelaar <erick.tryzelaar@gmail.com>",
"David Tolnay <dtolnay@gmail.com>",
]
-build = "build.rs"
-include = [
- "build.rs",
- "src/**/*.rs",
- "crates-io.md",
- "README.md",
- "LICENSE-APACHE",
- "LICENSE-MIT",
-]
description = "Token De/Serializer for testing De/Serialize implementations"
-homepage = "https://serde.rs"
documentation = "https://docs.rs/serde_test"
-readme = "crates-io.md"
+readme = "README.md"
keywords = [
"serde",
"serialization",
@@ -38,19 +29,21 @@ keywords = [
]
categories = ["development-tools::testing"]
license = "MIT OR Apache-2.0"
-repository = "https://github.com/serde-rs/serde"
+repository = "https://github.com/serde-rs/test"
[package.metadata.docs.rs]
+rustdoc-args = ["--generate-link-to-definition"]
targets = ["x86_64-unknown-linux-gnu"]
[lib]
doc-scrape-examples = false
[dependencies.serde]
-version = "1.0.60"
+version = "1.0.69"
[dev-dependencies.serde]
-version = "1.0"
+version = "1"
+features = ["rc"]
[dev-dependencies.serde_derive]
-version = "1.0"
+version = "1"
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
index 1246653..8d0e73e 100644
--- a/Cargo.toml.orig
+++ b/Cargo.toml.orig
@@ -1,28 +1,26 @@
[package]
name = "serde_test"
-version = "1.0.158" # remember to update html_root_url
+version = "1.0.176" # remember to update html_root_url
authors = ["Erick Tryzelaar <erick.tryzelaar@gmail.com>", "David Tolnay <dtolnay@gmail.com>"]
-build = "build.rs"
categories = ["development-tools::testing"]
description = "Token De/Serializer for testing De/Serialize implementations"
documentation = "https://docs.rs/serde_test"
-homepage = "https://serde.rs"
-include = ["build.rs", "src/**/*.rs", "crates-io.md", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
+edition = "2021"
keywords = ["serde", "serialization", "testing", "dev-dependencies"]
license = "MIT OR Apache-2.0"
-readme = "crates-io.md"
-repository = "https://github.com/serde-rs/serde"
-rust-version = "1.19"
+repository = "https://github.com/serde-rs/test"
+rust-version = "1.56"
[dependencies]
-serde = { version = "1.0.60", path = "../serde" }
+serde = "1.0.69"
[dev-dependencies]
-serde = { version = "1.0", path = "../serde" }
-serde_derive = { version = "1.0", path = "../serde_derive" }
+serde = { version = "1", features = ["rc"] }
+serde_derive = "1"
[lib]
doc-scrape-examples = false
[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
+rustdoc-args = ["--generate-link-to-definition"]
diff --git a/METADATA b/METADATA
index 6feb147..f012c0f 100644
--- a/METADATA
+++ b/METADATA
@@ -1,23 +1,20 @@
# This project was upgraded with external_updater.
-# Usage: tools/external_updater/updater.sh update rust/crates/serde_test
-# For more info, check https://cs.android.com/android/platform/superproject/+/master:tools/external_updater/README.md
+# Usage: tools/external_updater/updater.sh update external/rust/crates/serde_test
+# For more info, check https://cs.android.com/android/platform/superproject/+/main:tools/external_updater/README.md
name: "serde_test"
description: "Token De/Serializer for testing De/Serialize implementations"
third_party {
- url {
- type: HOMEPAGE
- value: "https://crates.io/crates/serde_test"
- }
- url {
- type: ARCHIVE
- value: "https://static.crates.io/crates/serde_test/serde_test-1.0.158.crate"
- }
- version: "1.0.158"
license_type: NOTICE
last_upgrade_date {
- year: 2023
- month: 3
- day: 20
+ year: 2024
+ month: 2
+ day: 5
+ }
+ homepage: "https://crates.io/crates/serde_test"
+ identifier {
+ type: "Archive"
+ value: "https://static.crates.io/crates/serde_test/serde_test-1.0.176.crate"
+ version: "1.0.176"
}
}
diff --git a/README.md b/README.md
index d53e572..1ff4c73 100644
--- a/README.md
+++ b/README.md
@@ -1,100 +1,70 @@
-# Serde &emsp; [![Build Status]][actions] [![Latest Version]][crates.io] [![serde: rustc 1.19+]][Rust 1.19] [![serde_derive: rustc 1.56+]][Rust 1.56]
-
-[Build Status]: https://img.shields.io/github/actions/workflow/status/serde-rs/serde/ci.yml?branch=master
-[actions]: https://github.com/serde-rs/serde/actions?query=branch%3Amaster
-[Latest Version]: https://img.shields.io/crates/v/serde.svg
-[crates.io]: https://crates.io/crates/serde
-[serde: rustc 1.19+]: https://img.shields.io/badge/serde-rustc_1.19+-lightgray.svg
-[serde_derive: rustc 1.56+]: https://img.shields.io/badge/serde_derive-rustc_1.56+-lightgray.svg
-[Rust 1.19]: https://blog.rust-lang.org/2017/07/20/Rust-1.19.html
-[Rust 1.56]: https://blog.rust-lang.org/2021/10/21/Rust-1.56.0.html
-
-**Serde is a framework for *ser*ializing and *de*serializing Rust data structures efficiently and generically.**
-
----
-
-You may be looking for:
-
-- [An overview of Serde](https://serde.rs/)
-- [Data formats supported by Serde](https://serde.rs/#data-formats)
-- [Setting up `#[derive(Serialize, Deserialize)]`](https://serde.rs/derive.html)
-- [Examples](https://serde.rs/examples.html)
-- [API documentation](https://docs.rs/serde)
-- [Release notes](https://github.com/serde-rs/serde/releases)
-
-## Serde in action
-
-<details>
-<summary>
-Click to show Cargo.toml.
-<a href="https://play.rust-lang.org/?edition=2018&gist=72755f28f99afc95e01d63174b28c1f5" target="_blank">Run this code in the playground.</a>
-</summary>
-
-```toml
-[dependencies]
-
-# The core APIs, including the Serialize and Deserialize traits. Always
-# required when using Serde. The "derive" feature is only required when
-# using #[derive(Serialize, Deserialize)] to make Serde work with structs
-# and enums defined in your crate.
-serde = { version = "1.0", features = ["derive"] }
-
-# Each data format lives in its own crate; the sample code below uses JSON
-# but you may be using a different one.
-serde_json = "1.0"
-```
+# serde\_test &emsp; [![Build Status]][actions] [![Latest Version]][crates.io]
-</details>
-<p></p>
+[Build Status]: https://img.shields.io/github/actions/workflow/status/serde-rs/test/ci.yml?branch=master
+[actions]: https://github.com/serde-rs/test/actions?query=branch%3Amaster
+[Latest Version]: https://img.shields.io/crates/v/serde_test.svg
+[crates.io]: https://crates.io/crates/serde\_test
-```rust
-use serde::{Serialize, Deserialize};
+This crate provides a convenient concise way to write unit tests for
+implementations of [`Serialize`] and [`Deserialize`].
-#[derive(Serialize, Deserialize, Debug)]
-struct Point {
- x: i32,
- y: i32,
-}
+[`Serialize`]: serde::ser::Serialize
+[`Deserialize`]: serde::de::Deserialize
-fn main() {
- let point = Point { x: 1, y: 2 };
+The `Serialize` impl for a value can be characterized by the sequence of
+[`Serializer`] calls that are made in the course of serializing the value, so
+`serde_test` provides a [`Token`] abstraction which corresponds roughly to
+`Serializer` method calls. There is an [`assert_ser_tokens`] function to test
+that a value serializes to a particular sequence of method calls, an
+[`assert_de_tokens`] function to test that a value can be deserialized from a
+particular sequence of method calls, and an [`assert_tokens`] function to test
+both directions. There are also functions to test expected failure conditions.
- // Convert the Point to a JSON string.
- let serialized = serde_json::to_string(&point).unwrap();
+[`Serializer`]: serde::ser::Serializer
- // Prints serialized = {"x":1,"y":2}
- println!("serialized = {}", serialized);
+Here is an example from the [`linked-hash-map`] crate.
- // Convert the JSON string back to a Point.
- let deserialized: Point = serde_json::from_str(&serialized).unwrap();
+[`linked-hash-map`]: https://github.com/contain-rs/linked-hash-map
- // Prints deserialized = Point { x: 1, y: 2 }
- println!("deserialized = {:?}", deserialized);
+```rust
+use linked_hash_map::LinkedHashMap;
+use serde_test::{assert_tokens, Token};
+
+#[test]
+fn test_ser_de_empty() {
+ let map = LinkedHashMap::<char, u32>::new();
+
+ assert_tokens(
+ &map,
+ &[
+ Token::Map { len: Some(0) },
+ Token::MapEnd,
+ ],
+ );
}
-```
-## Getting help
-
-Serde is one of the most widely used Rust libraries so any place that Rustaceans
-congregate will be able to help you out. For chat, consider trying the
-[#rust-questions] or [#rust-beginners] channels of the unofficial community
-Discord (invite: <https://discord.gg/rust-lang-community>), the [#rust-usage] or
-[#beginners] channels of the official Rust Project Discord (invite:
-<https://discord.gg/rust-lang>), or the [#general][zulip] stream in Zulip. For
-asynchronous, consider the [\[rust\] tag on StackOverflow][stackoverflow], the
-[/r/rust] subreddit which has a pinned weekly easy questions post, or the Rust
-[Discourse forum][discourse]. It's acceptable to file a support issue in this
-repo but they tend not to get as many eyes as any of the above and may get
-closed without a response after some time.
-
-[#rust-questions]: https://discord.com/channels/273534239310479360/274215136414400513
-[#rust-beginners]: https://discord.com/channels/273534239310479360/273541522815713281
-[#rust-usage]: https://discord.com/channels/442252698964721669/443150878111694848
-[#beginners]: https://discord.com/channels/442252698964721669/448238009733742612
-[zulip]: https://rust-lang.zulipchat.com/#narrow/stream/122651-general
-[stackoverflow]: https://stackoverflow.com/questions/tagged/rust
-[/r/rust]: https://www.reddit.com/r/rust
-[discourse]: https://users.rust-lang.org
+#[test]
+fn test_ser_de() {
+ let mut map = LinkedHashMap::new();
+ map.insert('b', 20);
+ map.insert('a', 10);
+ map.insert('c', 30);
+
+ assert_tokens(
+ &map,
+ &[
+ Token::Map { len: Some(3) },
+ Token::Char('b'),
+ Token::I32(20),
+ Token::Char('a'),
+ Token::I32(10),
+ Token::Char('c'),
+ Token::I32(30),
+ Token::MapEnd,
+ ],
+ );
+}
+```
<br>
@@ -109,6 +79,6 @@ Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
<sub>
Unless you explicitly state otherwise, any contribution intentionally submitted
-for inclusion in Serde by you, as defined in the Apache-2.0 license, shall be
-dual licensed as above, without any additional terms or conditions.
+for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
+be dual licensed as above, without any additional terms or conditions.
</sub>
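
The README text above mentions that serde_test also provides functions for testing expected failure conditions, but none of the examples in this change show one end to end. A minimal sketch (not part of the upstream README or of this change; it assumes a `serde_derive`-derived struct with `deny_unknown_fields`, and the expected string follows serde's standard unknown-field error message):

```rust
use serde_derive::Deserialize;
use serde_test::{assert_de_tokens_error, Token};

// Hypothetical test type, introduced only for this sketch.
#[derive(Deserialize, PartialEq, Debug)]
#[serde(deny_unknown_fields)]
struct S {
    a: u8,
    b: u8,
}

#[test]
fn test_unknown_field() {
    // Feeding an unexpected key makes deserialization fail; the assert
    // checks that the reported error matches the expected message.
    assert_de_tokens_error::<S>(
        &[
            Token::Struct { name: "S", len: 2 },
            Token::Str("x"),
        ],
        "unknown field `x`, expected `a` or `b`",
    );
}
```

`assert_ser_tokens_error` plays the same role on the serialization side, as the src/assert.rs documentation in this diff illustrates.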
diff --git a/build.rs b/build.rs
deleted file mode 100644
index b7f69d9..0000000
--- a/build.rs
+++ /dev/null
@@ -1,50 +0,0 @@
-use std::env;
-use std::process::Command;
-use std::str::{self, FromStr};
-
-// The rustc-cfg strings below are *not* public API. Please let us know by
-// opening a GitHub issue if your build environment requires some way to enable
-// these cfgs other than by executing our build script.
-fn main() {
- println!("cargo:rerun-if-changed=build.rs");
-
- let minor = match rustc_minor_version() {
- Some(minor) => minor,
- None => return,
- };
-
- // #[track_caller] stabilized in Rust 1.46:
- // https://blog.rust-lang.org/2020/08/27/Rust-1.46.0.html#track_caller
- if minor < 46 {
- println!("cargo:rustc-cfg=no_track_caller");
- }
-}
-
-fn rustc_minor_version() -> Option<u32> {
- let rustc = match env::var_os("RUSTC") {
- Some(rustc) => rustc,
- None => return None,
- };
-
- let output = match Command::new(rustc).arg("--version").output() {
- Ok(output) => output,
- Err(_) => return None,
- };
-
- let version = match str::from_utf8(&output.stdout) {
- Ok(version) => version,
- Err(_) => return None,
- };
-
- let mut pieces = version.split('.');
- if pieces.next() != Some("rustc 1") {
- return None;
- }
-
- let next = match pieces.next() {
- Some(next) => next,
- None => return None,
- };
-
- u32::from_str(next).ok()
-}
diff --git a/crates-io.md b/crates-io.md
deleted file mode 100644
index 6e0ec28..0000000
--- a/crates-io.md
+++ /dev/null
@@ -1,65 +0,0 @@
-<!-- Serde readme rendered on crates.io -->
-
-**Serde is a framework for *ser*ializing and *de*serializing Rust data structures efficiently and generically.**
-
----
-
-You may be looking for:
-
-- [An overview of Serde](https://serde.rs/)
-- [Data formats supported by Serde](https://serde.rs/#data-formats)
-- [Setting up `#[derive(Serialize, Deserialize)]`](https://serde.rs/derive.html)
-- [Examples](https://serde.rs/examples.html)
-- [API documentation](https://docs.rs/serde)
-- [Release notes](https://github.com/serde-rs/serde/releases)
-
-## Serde in action
-
-```rust
-use serde::{Serialize, Deserialize};
-
-#[derive(Serialize, Deserialize, Debug)]
-struct Point {
- x: i32,
- y: i32,
-}
-
-fn main() {
- let point = Point { x: 1, y: 2 };
-
- // Convert the Point to a JSON string.
- let serialized = serde_json::to_string(&point).unwrap();
-
- // Prints serialized = {"x":1,"y":2}
- println!("serialized = {}", serialized);
-
- // Convert the JSON string back to a Point.
- let deserialized: Point = serde_json::from_str(&serialized).unwrap();
-
- // Prints deserialized = Point { x: 1, y: 2 }
- println!("deserialized = {:?}", deserialized);
-}
-```
-
-## Getting help
-
-Serde is one of the most widely used Rust libraries so any place that Rustaceans
-congregate will be able to help you out. For chat, consider trying the
-[#rust-questions] or [#rust-beginners] channels of the unofficial community
-Discord (invite: <https://discord.gg/rust-lang-community>), the [#rust-usage] or
-[#beginners] channels of the official Rust Project Discord (invite:
-<https://discord.gg/rust-lang>), or the [#general][zulip] stream in Zulip. For
-asynchronous, consider the [\[rust\] tag on StackOverflow][stackoverflow], the
-[/r/rust] subreddit which has a pinned weekly easy questions post, or the Rust
-[Discourse forum][discourse]. It's acceptable to file a support issue in this
-repo but they tend not to get as many eyes as any of the above and may get
-closed without a response after some time.
-
-[#rust-questions]: https://discord.com/channels/273534239310479360/274215136414400513
-[#rust-beginners]: https://discord.com/channels/273534239310479360/273541522815713281
-[#rust-usage]: https://discord.com/channels/442252698964721669/443150878111694848
-[#beginners]: https://discord.com/channels/442252698964721669/448238009733742612
-[zulip]: https://rust-lang.zulipchat.com/#narrow/stream/122651-general
-[stackoverflow]: https://stackoverflow.com/questions/tagged/rust
-[/r/rust]: https://www.reddit.com/r/rust
-[discourse]: https://users.rust-lang.org
diff --git a/src/assert.rs b/src/assert.rs
index 9be153d..bbdafd9 100644
--- a/src/assert.rs
+++ b/src/assert.rs
@@ -1,15 +1,13 @@
+use crate::de::Deserializer;
+use crate::ser::Serializer;
+use crate::token::Token;
use serde::{Deserialize, Serialize};
-
-use de::Deserializer;
-use ser::Serializer;
-use token::Token;
-
use std::fmt::Debug;
/// Runs both `assert_ser_tokens` and `assert_de_tokens`.
///
-/// ```edition2018
-/// # use serde::{Serialize, Deserialize};
+/// ```
+/// # use serde_derive::{Deserialize, Serialize};
/// # use serde_test::{assert_tokens, Token};
/// #
/// #[derive(Serialize, Deserialize, PartialEq, Debug)]
@@ -19,14 +17,17 @@ use std::fmt::Debug;
/// }
///
/// let s = S { a: 0, b: 0 };
-/// assert_tokens(&s, &[
-/// Token::Struct { name: "S", len: 2 },
-/// Token::Str("a"),
-/// Token::U8(0),
-/// Token::Str("b"),
-/// Token::U8(0),
-/// Token::StructEnd,
-/// ]);
+/// assert_tokens(
+/// &s,
+/// &[
+/// Token::Struct { name: "S", len: 2 },
+/// Token::Str("a"),
+/// Token::U8(0),
+/// Token::Str("b"),
+/// Token::U8(0),
+/// Token::StructEnd,
+/// ],
+/// );
/// ```
#[cfg_attr(not(no_track_caller), track_caller)]
pub fn assert_tokens<'de, T>(value: &T, tokens: &'de [Token])
@@ -39,8 +40,8 @@ where
/// Asserts that `value` serializes to the given `tokens`.
///
-/// ```edition2018
-/// # use serde::{Serialize, Deserialize};
+/// ```
+/// # use serde_derive::{Deserialize, Serialize};
/// # use serde_test::{assert_ser_tokens, Token};
/// #
/// #[derive(Serialize, Deserialize, PartialEq, Debug)]
@@ -50,14 +51,17 @@ where
/// }
///
/// let s = S { a: 0, b: 0 };
-/// assert_ser_tokens(&s, &[
-/// Token::Struct { name: "S", len: 2 },
-/// Token::Str("a"),
-/// Token::U8(0),
-/// Token::Str("b"),
-/// Token::U8(0),
-/// Token::StructEnd,
-/// ]);
+/// assert_ser_tokens(
+/// &s,
+/// &[
+/// Token::Struct { name: "S", len: 2 },
+/// Token::Str("a"),
+/// Token::U8(0),
+/// Token::Str("b"),
+/// Token::U8(0),
+/// Token::StructEnd,
+/// ],
+/// );
/// ```
#[cfg_attr(not(no_track_caller), track_caller)]
pub fn assert_ser_tokens<T: ?Sized>(value: &T, tokens: &[Token])
@@ -78,23 +82,24 @@ where
/// Asserts that `value` serializes to the given `tokens`, and then yields
/// `error`.
///
-/// ```edition2018
+/// ```
+/// use serde_derive::Serialize;
+/// use serde_test::{assert_ser_tokens_error, Token};
/// use std::sync::{Arc, Mutex};
/// use std::thread;
///
-/// use serde::Serialize;
-/// use serde_test::{assert_ser_tokens_error, Token};
-///
/// #[derive(Serialize)]
/// struct Example {
/// lock: Arc<Mutex<u32>>,
/// }
///
/// fn main() {
-/// let example = Example { lock: Arc::new(Mutex::new(0)) };
+/// let example = Example {
+/// lock: Arc::new(Mutex::new(0)),
+/// };
/// let lock = example.lock.clone();
///
-/// let _ = thread::spawn(move || {
+/// let thread = thread::spawn(move || {
/// // This thread will acquire the mutex first, unwrapping the result
/// // of `lock` because the lock has not been poisoned.
/// let _guard = lock.lock().unwrap();
@@ -102,10 +107,14 @@ where
/// // This panic while holding the lock (`_guard` is in scope) will
/// // poison the mutex.
/// panic!()
-/// }).join();
+/// });
+/// thread.join();
///
/// let expected = &[
-/// Token::Struct { name: "Example", len: 1 },
+/// Token::Struct {
+/// name: "Example",
+/// len: 1,
+/// },
/// Token::Str("lock"),
/// ];
/// let error = "lock poison error while serializing";
@@ -130,8 +139,8 @@ where
/// Asserts that the given `tokens` deserialize into `value`.
///
-/// ```edition2018
-/// # use serde::{Serialize, Deserialize};
+/// ```
+/// # use serde_derive::{Deserialize, Serialize};
/// # use serde_test::{assert_de_tokens, Token};
/// #
/// #[derive(Serialize, Deserialize, PartialEq, Debug)]
@@ -141,14 +150,17 @@ where
/// }
///
/// let s = S { a: 0, b: 0 };
-/// assert_de_tokens(&s, &[
-/// Token::Struct { name: "S", len: 2 },
-/// Token::Str("a"),
-/// Token::U8(0),
-/// Token::Str("b"),
-/// Token::U8(0),
-/// Token::StructEnd,
-/// ]);
+/// assert_de_tokens(
+/// &s,
+/// &[
+/// Token::Struct { name: "S", len: 2 },
+/// Token::Str("a"),
+/// Token::U8(0),
+/// Token::Str("b"),
+/// Token::U8(0),
+/// Token::StructEnd,
+/// ],
+/// );
/// ```
#[cfg_attr(not(no_track_caller), track_caller)]
pub fn assert_de_tokens<'de, T>(value: &T, tokens: &'de [Token])
@@ -184,8 +196,8 @@ where
/// Asserts that the given `tokens` yield `error` when deserializing.
///
-/// ```edition2018
-/// # use serde::{Serialize, Deserialize};
+/// ```
+/// # use serde_derive::{Deserialize, Serialize};
/// # use serde_test::{assert_de_tokens_error, Token};
/// #
/// #[derive(Serialize, Deserialize, PartialEq, Debug)]
diff --git a/src/configure.rs b/src/configure.rs
index d34ad90..9b10370 100644
--- a/src/configure.rs
+++ b/src/configure.rs
@@ -1,10 +1,12 @@
-use std::fmt;
-
+use serde::de::{
+ Deserialize, DeserializeSeed, Deserializer, EnumAccess, Error, MapAccess, SeqAccess,
+ VariantAccess, Visitor,
+};
use serde::ser::{
- SerializeMap, SerializeSeq, SerializeStruct, SerializeStructVariant, SerializeTuple,
- SerializeTupleStruct, SerializeTupleVariant,
+ Serialize, SerializeMap, SerializeSeq, SerializeStruct, SerializeStructVariant, SerializeTuple,
+ SerializeTupleStruct, SerializeTupleVariant, Serializer,
};
-use serde::{Deserialize, Deserializer, Serialize, Serializer};
+use std::fmt;
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct Readable<T: ?Sized>(T);
@@ -14,7 +16,7 @@ pub struct Compact<T: ?Sized>(T);
/// Trait to determine whether a value is represented in human-readable or
/// compact form.
///
-/// ```edition2018
+/// ```
/// use serde::{Deserialize, Deserializer, Serialize, Serializer};
/// use serde_test::{assert_tokens, Configure, Token};
///
@@ -467,8 +469,6 @@ macro_rules! impl_serializer {
impl_serializer!(Readable, true);
impl_serializer!(Compact, false);
-use serde::de::{DeserializeSeed, EnumAccess, Error, MapAccess, SeqAccess, VariantAccess, Visitor};
-
macro_rules! forward_deserialize_methods {
( $wrapper : ident ( $( $name: ident ),* ) ) => {
$(
diff --git a/src/de.rs b/src/de.rs
index 673a0c0..a5d9e1d 100644
--- a/src/de.rs
+++ b/src/de.rs
@@ -1,59 +1,53 @@
+use crate::error::Error;
+use crate::token::Token;
use serde::de::value::{MapAccessDeserializer, SeqAccessDeserializer};
use serde::de::{
self, Deserialize, DeserializeSeed, EnumAccess, IntoDeserializer, MapAccess, SeqAccess,
VariantAccess, Visitor,
};
-
-use error::Error;
-use token::Token;
+use serde::forward_to_deserialize_any;
#[derive(Debug)]
pub struct Deserializer<'de> {
tokens: &'de [Token],
}
-macro_rules! assert_next_token {
- ($de:expr, $expected:expr) => {
- match $de.next_token_opt() {
- Some(token) if token == $expected => {}
- Some(other) => panic!(
- "expected Token::{} but deserialization wants Token::{}",
- other, $expected
- ),
- None => panic!(
- "end of tokens but deserialization wants Token::{}",
- $expected
- ),
- }
- };
+fn assert_next_token(de: &mut Deserializer, expected: Token) -> Result<(), Error> {
+ match de.next_token_opt() {
+ Some(token) if token == expected => Ok(()),
+ Some(other) => Err(de::Error::custom(format!(
+ "expected Token::{} but deserialization wants Token::{}",
+ other, expected,
+ ))),
+ None => Err(de::Error::custom(format!(
+ "end of tokens but deserialization wants Token::{}",
+ expected,
+ ))),
+ }
}
-macro_rules! unexpected {
- ($token:expr) => {
- panic!("deserialization did not expect this token: {}", $token)
- };
+fn unexpected(token: Token) -> Error {
+ de::Error::custom(format!(
+ "deserialization did not expect this token: {}",
+ token,
+ ))
}
-macro_rules! end_of_tokens {
- () => {
- panic!("ran out of tokens to deserialize")
- };
+fn end_of_tokens() -> Error {
+ de::Error::custom("ran out of tokens to deserialize")
}
impl<'de> Deserializer<'de> {
pub fn new(tokens: &'de [Token]) -> Self {
- Deserializer { tokens: tokens }
+ Deserializer { tokens }
}
fn peek_token_opt(&self) -> Option<Token> {
- self.tokens.first().cloned()
+ self.tokens.first().copied()
}
- fn peek_token(&self) -> Token {
- match self.peek_token_opt() {
- Some(token) => token,
- None => end_of_tokens!(),
- }
+ fn peek_token(&self) -> Result<Token, Error> {
+ self.peek_token_opt().ok_or_else(end_of_tokens)
}
pub fn next_token_opt(&mut self) -> Option<Token> {
@@ -66,14 +60,10 @@ impl<'de> Deserializer<'de> {
}
}
- fn next_token(&mut self) -> Token {
- match self.tokens.split_first() {
- Some((&first, rest)) => {
- self.tokens = rest;
- first
- }
- None => end_of_tokens!(),
- }
+ fn next_token(&mut self) -> Result<Token, Error> {
+ let (&first, rest) = self.tokens.split_first().ok_or_else(end_of_tokens)?;
+ self.tokens = rest;
+ Ok(first)
}
pub fn remaining(&self) -> usize {
@@ -89,12 +79,8 @@ impl<'de> Deserializer<'de> {
where
V: Visitor<'de>,
{
- let value = visitor.visit_seq(DeserializerSeqVisitor {
- de: self,
- len: len,
- end: end,
- })?;
- assert_next_token!(self, end);
+ let value = visitor.visit_seq(DeserializerSeqVisitor { de: self, len, end })?;
+ assert_next_token(self, end)?;
Ok(value)
}
@@ -107,12 +93,8 @@ impl<'de> Deserializer<'de> {
where
V: Visitor<'de>,
{
- let value = visitor.visit_map(DeserializerMapVisitor {
- de: self,
- len: len,
- end: end,
- })?;
- assert_next_token!(self, end);
+ let value = visitor.visit_map(DeserializerMapVisitor { de: self, len, end })?;
+ assert_next_token(self, end)?;
Ok(value)
}
}
@@ -129,7 +111,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
where
V: Visitor<'de>,
{
- let token = self.next_token();
+ let token = self.next_token()?;
match token {
Token::Bool(v) => visitor.visit_bool(v),
Token::I8(v) => visitor.visit_i8(v),
@@ -161,50 +143,50 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
Token::Map { len } => self.visit_map(len, Token::MapEnd, visitor),
Token::Struct { len, .. } => self.visit_map(Some(len), Token::StructEnd, visitor),
Token::Enum { .. } => {
- let variant = self.next_token();
- let next = self.peek_token();
+ let variant = self.next_token()?;
+ let next = self.peek_token()?;
match (variant, next) {
(Token::Str(variant), Token::Unit) => {
- self.next_token();
+ self.next_token()?;
visitor.visit_str(variant)
}
(Token::BorrowedStr(variant), Token::Unit) => {
- self.next_token();
+ self.next_token()?;
visitor.visit_borrowed_str(variant)
}
(Token::String(variant), Token::Unit) => {
- self.next_token();
+ self.next_token()?;
visitor.visit_string(variant.to_string())
}
(Token::Bytes(variant), Token::Unit) => {
- self.next_token();
+ self.next_token()?;
visitor.visit_bytes(variant)
}
(Token::BorrowedBytes(variant), Token::Unit) => {
- self.next_token();
+ self.next_token()?;
visitor.visit_borrowed_bytes(variant)
}
(Token::ByteBuf(variant), Token::Unit) => {
- self.next_token();
+ self.next_token()?;
visitor.visit_byte_buf(variant.to_vec())
}
(Token::U8(variant), Token::Unit) => {
- self.next_token();
+ self.next_token()?;
visitor.visit_u8(variant)
}
(Token::U16(variant), Token::Unit) => {
- self.next_token();
+ self.next_token()?;
visitor.visit_u16(variant)
}
(Token::U32(variant), Token::Unit) => {
- self.next_token();
+ self.next_token()?;
visitor.visit_u32(variant)
}
(Token::U64(variant), Token::Unit) => {
- self.next_token();
+ self.next_token()?;
visitor.visit_u64(variant)
}
- (variant, Token::Unit) => unexpected!(variant),
+ (variant, Token::Unit) => Err(unexpected(variant)),
(variant, _) => {
visitor.visit_map(EnumMapVisitor::new(self, variant, EnumFormat::Any))
}
@@ -232,9 +214,7 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
| Token::MapEnd
| Token::StructEnd
| Token::TupleVariantEnd
- | Token::StructVariantEnd => {
- unexpected!(token);
- }
+ | Token::StructVariantEnd => Err(unexpected(token)),
}
}
@@ -242,13 +222,13 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
where
V: Visitor<'de>,
{
- match self.peek_token() {
+ match self.peek_token()? {
Token::Unit | Token::None => {
- self.next_token();
+ self.next_token()?;
visitor.visit_none()
}
Token::Some => {
- self.next_token();
+ self.next_token()?;
visitor.visit_some(self)
}
_ => self.deserialize_any(visitor),
@@ -264,9 +244,9 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
where
V: Visitor<'de>,
{
- match self.peek_token() {
+ match self.peek_token()? {
Token::Enum { name: n } if name == n => {
- self.next_token();
+ self.next_token()?;
visitor.visit_enum(DeserializerEnumVisitor { de: self })
}
@@ -286,9 +266,9 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
where
V: Visitor<'de>,
{
- match self.peek_token() {
+ match self.peek_token()? {
Token::UnitStruct { .. } => {
- assert_next_token!(self, Token::UnitStruct { name: name });
+ assert_next_token(self, Token::UnitStruct { name })?;
visitor.visit_unit()
}
_ => self.deserialize_any(visitor),
@@ -303,9 +283,9 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
where
V: Visitor<'de>,
{
- match self.peek_token() {
+ match self.peek_token()? {
Token::NewtypeStruct { .. } => {
- assert_next_token!(self, Token::NewtypeStruct { name: name });
+ assert_next_token(self, Token::NewtypeStruct { name })?;
visitor.visit_newtype_struct(self)
}
_ => self.deserialize_any(visitor),
@@ -316,21 +296,21 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
where
V: Visitor<'de>,
{
- match self.peek_token() {
+ match self.peek_token()? {
Token::Unit | Token::UnitStruct { .. } => {
- self.next_token();
+ self.next_token()?;
visitor.visit_unit()
}
Token::Seq { .. } => {
- self.next_token();
+ self.next_token()?;
self.visit_seq(Some(len), Token::SeqEnd, visitor)
}
Token::Tuple { .. } => {
- self.next_token();
+ self.next_token()?;
self.visit_seq(Some(len), Token::TupleEnd, visitor)
}
Token::TupleStruct { .. } => {
- self.next_token();
+ self.next_token()?;
self.visit_seq(Some(len), Token::TupleStructEnd, visitor)
}
_ => self.deserialize_any(visitor),
@@ -346,25 +326,25 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
where
V: Visitor<'de>,
{
- match self.peek_token() {
+ match self.peek_token()? {
Token::Unit => {
- self.next_token();
+ self.next_token()?;
visitor.visit_unit()
}
Token::UnitStruct { .. } => {
- assert_next_token!(self, Token::UnitStruct { name: name });
+ assert_next_token(self, Token::UnitStruct { name })?;
visitor.visit_unit()
}
Token::Seq { .. } => {
- self.next_token();
+ self.next_token()?;
self.visit_seq(Some(len), Token::SeqEnd, visitor)
}
Token::Tuple { .. } => {
- self.next_token();
+ self.next_token()?;
self.visit_seq(Some(len), Token::TupleEnd, visitor)
}
Token::TupleStruct { len: n, .. } => {
- assert_next_token!(self, Token::TupleStruct { name: name, len: n });
+ assert_next_token(self, Token::TupleStruct { name, len: n })?;
self.visit_seq(Some(len), Token::TupleStructEnd, visitor)
}
_ => self.deserialize_any(visitor),
@@ -380,13 +360,13 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut Deserializer<'de> {
where
V: Visitor<'de>,
{
- match self.peek_token() {
+ match self.peek_token()? {
Token::Struct { len: n, .. } => {
- assert_next_token!(self, Token::Struct { name: name, len: n });
+ assert_next_token(self, Token::Struct { name, len: n })?;
self.visit_map(Some(fields.len()), Token::StructEnd, visitor)
}
Token::Map { .. } => {
- self.next_token();
+ self.next_token()?;
self.visit_map(Some(fields.len()), Token::MapEnd, visitor)
}
_ => self.deserialize_any(visitor),
@@ -476,7 +456,7 @@ impl<'de, 'a> EnumAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
where
V: DeserializeSeed<'de>,
{
- match self.de.peek_token() {
+ match self.de.peek_token()? {
Token::UnitVariant { variant: v, .. }
| Token::NewtypeVariant { variant: v, .. }
| Token::TupleVariant { variant: v, .. }
@@ -497,9 +477,9 @@ impl<'de, 'a> VariantAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
type Error = Error;
fn unit_variant(self) -> Result<(), Error> {
- match self.de.peek_token() {
+ match self.de.peek_token()? {
Token::UnitVariant { .. } => {
- self.de.next_token();
+ self.de.next_token()?;
Ok(())
}
_ => Deserialize::deserialize(self.de),
@@ -510,9 +490,9 @@ impl<'de, 'a> VariantAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
where
T: DeserializeSeed<'de>,
{
- match self.de.peek_token() {
+ match self.de.peek_token()? {
Token::NewtypeVariant { .. } => {
- self.de.next_token();
+ self.de.next_token()?;
seed.deserialize(self.de)
}
_ => seed.deserialize(self.de),
@@ -523,26 +503,26 @@ impl<'de, 'a> VariantAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
where
V: Visitor<'de>,
{
- match self.de.peek_token() {
+ match self.de.peek_token()? {
Token::TupleVariant { len: enum_len, .. } => {
- let token = self.de.next_token();
+ let token = self.de.next_token()?;
if len == enum_len {
self.de
.visit_seq(Some(len), Token::TupleVariantEnd, visitor)
} else {
- unexpected!(token);
+ Err(unexpected(token))
}
}
Token::Seq {
len: Some(enum_len),
} => {
- let token = self.de.next_token();
+ let token = self.de.next_token()?;
if len == enum_len {
self.de.visit_seq(Some(len), Token::SeqEnd, visitor)
} else {
- unexpected!(token);
+ Err(unexpected(token))
}
}
_ => de::Deserializer::deserialize_any(self.de, visitor),
@@ -557,27 +537,27 @@ impl<'de, 'a> VariantAccess<'de> for DeserializerEnumVisitor<'a, 'de> {
where
V: Visitor<'de>,
{
- match self.de.peek_token() {
+ match self.de.peek_token()? {
Token::StructVariant { len: enum_len, .. } => {
- let token = self.de.next_token();
+ let token = self.de.next_token()?;
if fields.len() == enum_len {
self.de
.visit_map(Some(fields.len()), Token::StructVariantEnd, visitor)
} else {
- unexpected!(token);
+ Err(unexpected(token))
}
}
Token::Map {
len: Some(enum_len),
} => {
- let token = self.de.next_token();
+ let token = self.de.next_token()?;
if fields.len() == enum_len {
self.de
.visit_map(Some(fields.len()), Token::MapEnd, visitor)
} else {
- unexpected!(token);
+ Err(unexpected(token))
}
}
_ => de::Deserializer::deserialize_any(self.de, visitor),
@@ -602,9 +582,9 @@ enum EnumFormat {
impl<'a, 'de> EnumMapVisitor<'a, 'de> {
fn new(de: &'a mut Deserializer<'de>, variant: Token, format: EnumFormat) -> Self {
EnumMapVisitor {
- de: de,
+ de,
variant: Some(variant),
- format: format,
+ format,
}
}
}
@@ -622,7 +602,7 @@ impl<'de, 'a> MapAccess<'de> for EnumMapVisitor<'a, 'de> {
.deserialize(BytesDeserializer { value: variant })
.map(Some),
Some(Token::U32(variant)) => seed.deserialize(variant.into_deserializer()).map(Some),
- Some(other) => unexpected!(other),
+ Some(other) => Err(unexpected(other)),
None => Ok(None),
}
}
@@ -641,7 +621,7 @@ impl<'de, 'a> MapAccess<'de> for EnumMapVisitor<'a, 'de> {
};
seed.deserialize(SeqAccessDeserializer::new(visitor))?
};
- assert_next_token!(self.de, Token::TupleVariantEnd);
+ assert_next_token(self.de, Token::TupleVariantEnd)?;
Ok(value)
}
EnumFormat::Map => {
@@ -653,7 +633,7 @@ impl<'de, 'a> MapAccess<'de> for EnumMapVisitor<'a, 'de> {
};
seed.deserialize(MapAccessDeserializer::new(visitor))?
};
- assert_next_token!(self.de, Token::StructVariantEnd);
+ assert_next_token(self.de, Token::StructVariantEnd)?;
Ok(value)
}
EnumFormat::Any => seed.deserialize(&mut *self.de),
diff --git a/src/error.rs b/src/error.rs
index 54a71d2..73a7e45 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -1,8 +1,7 @@
+use serde::{de, ser};
use std::error;
use std::fmt::{self, Display};
-use serde::{de, ser};
-
#[derive(Clone, Debug)]
pub struct Error {
msg: String,
diff --git a/src/lib.rs b/src/lib.rs
index 908ddfc..4e3a9f2 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -20,11 +20,11 @@
//!
//! [`linked-hash-map`]: https://github.com/contain-rs/linked-hash-map
//!
-//! ```edition2018
+//! ```
//! # const IGNORE: &str = stringify! {
//! use linked_hash_map::LinkedHashMap;
//! # };
-//! use serde_test::{Token, assert_tokens};
+//! use serde_test::{assert_tokens, Token};
//!
//! # use std::fmt;
//! # use std::marker::PhantomData;
@@ -106,10 +106,13 @@
//! fn test_ser_de_empty() {
//! let map = LinkedHashMap::<char, u32>::new();
//!
-//! assert_tokens(&map, &[
-//! Token::Map { len: Some(0) },
-//! Token::MapEnd,
-//! ]);
+//! assert_tokens(
+//! &map,
+//! &[
+//! Token::Map { len: Some(0) },
+//! Token::MapEnd,
+//! ],
+//! );
//! }
//!
//! #[test]
@@ -120,18 +123,19 @@
//! map.insert('a', 10);
//! map.insert('c', 30);
//!
-//! assert_tokens(&map, &[
-//! Token::Map { len: Some(3) },
-//! Token::Char('b'),
-//! Token::I32(20),
-//!
-//! Token::Char('a'),
-//! Token::I32(10),
-//!
-//! Token::Char('c'),
-//! Token::I32(30),
-//! Token::MapEnd,
-//! ]);
+//! assert_tokens(
+//! &map,
+//! &[
+//! Token::Map { len: Some(3) },
+//! Token::Char('b'),
+//! Token::I32(20),
+//! Token::Char('a'),
+//! Token::I32(10),
+//! Token::Char('c'),
+//! Token::I32(30),
+//! Token::MapEnd,
+//! ],
+//! );
//! }
//! #
//! # fn main() {
@@ -140,49 +144,27 @@
//! # }
//! ```
-#![doc(html_root_url = "https://docs.rs/serde_test/1.0.158")]
-#![cfg_attr(feature = "cargo-clippy", allow(renamed_and_removed_lints))]
+#![doc(html_root_url = "https://docs.rs/serde_test/1.0.176")]
// Ignored clippy lints
-#![cfg_attr(feature = "cargo-clippy", allow(float_cmp, needless_doctest_main))]
+#![allow(clippy::float_cmp, clippy::needless_doctest_main)]
// Ignored clippy_pedantic lints
-#![cfg_attr(
- feature = "cargo-clippy",
- allow(
- cloned_instead_of_copied,
- doc_link_with_quotes, // https://github.com/rust-lang/rust-clippy/issues/8961
- empty_line_after_outer_attr,
- manual_assert,
- missing_docs_in_private_items,
- missing_panics_doc,
- module_name_repetitions,
- must_use_candidate,
- redundant_field_names,
- too_many_lines,
- type_repetition_in_bounds, // https://github.com/rust-lang/rust-clippy/issues/8772
- use_debug,
- use_self
- )
+#![allow(
+ clippy::manual_assert,
+ clippy::missing_panics_doc,
+ clippy::module_name_repetitions,
+ clippy::too_many_lines
)]
-#[macro_use]
-extern crate serde;
-
+mod assert;
+mod configure;
mod de;
mod error;
mod ser;
-
-mod assert;
-mod configure;
mod token;
-pub use assert::{
+pub use crate::assert::{
assert_de_tokens, assert_de_tokens_error, assert_ser_tokens, assert_ser_tokens_error,
assert_tokens,
};
-pub use token::Token;
-
-pub use configure::{Compact, Configure, Readable};
-
-// Not public API.
-#[doc(hidden)]
-pub use de::Deserializer;
+pub use crate::configure::{Compact, Configure, Readable};
+pub use crate::token::Token;
diff --git a/src/ser.rs b/src/ser.rs
index 0827e79..c18a4ae 100644
--- a/src/ser.rs
+++ b/src/ser.rs
@@ -1,7 +1,6 @@
-use serde::{ser, Serialize};
-
-use error::Error;
-use token::Token;
+use crate::error::Error;
+use crate::token::Token;
+use serde::ser::{self, Serialize};
/// A `Serializer` that ensures that a value serializes to a given list of
/// tokens.
@@ -13,7 +12,7 @@ pub struct Serializer<'a> {
impl<'a> Serializer<'a> {
/// Creates the serializer.
pub fn new(tokens: &'a [Token]) -> Self {
- Serializer { tokens: tokens }
+ Serializer { tokens }
}
/// Pulls the next token off of the serializer, ignoring it.
@@ -63,14 +62,12 @@ macro_rules! assert_next_token {
($ser:expr, $actual:expr, $pat:pat, $guard:expr) => {
match $ser.next_token() {
Some($pat) if $guard => {}
- Some(expected) => {
- panic!("expected Token::{} but serialized as {}",
- expected, $actual);
- }
- None => {
- panic!("expected end of tokens, but {} was serialized",
- $actual);
- }
+ Some(expected) => return Err(ser::Error::custom(
+ format!("expected Token::{} but serialized as {}", expected, $actual)
+ )),
+ None => return Err(ser::Error::custom(
+ format!("expected end of tokens, but {} was serialized", $actual)
+ )),
}
};
}
@@ -181,7 +178,7 @@ impl<'s, 'a> ser::Serializer for &'s mut Serializer<'a> {
_variant_index: u32,
variant: &'static str,
) -> Result<(), Error> {
- if self.tokens.first() == Some(&Token::Enum { name: name }) {
+ if self.tokens.first() == Some(&Token::Enum { name }) {
self.next_token();
assert_next_token!(self, Str(variant));
assert_next_token!(self, Unit);
@@ -209,7 +206,7 @@ impl<'s, 'a> ser::Serializer for &'s mut Serializer<'a> {
where
T: Serialize,
{
- if self.tokens.first() == Some(&Token::Enum { name: name }) {
+ if self.tokens.first() == Some(&Token::Enum { name }) {
self.next_token();
assert_next_token!(self, Str(variant));
} else {
@@ -253,7 +250,7 @@ impl<'s, 'a> ser::Serializer for &'s mut Serializer<'a> {
variant: &'static str,
len: usize,
) -> Result<Self::SerializeTupleVariant, Error> {
- if self.tokens.first() == Some(&Token::Enum { name: name }) {
+ if self.tokens.first() == Some(&Token::Enum { name }) {
self.next_token();
assert_next_token!(self, Str(variant));
let len = Some(len);
@@ -288,7 +285,7 @@ impl<'s, 'a> ser::Serializer for &'s mut Serializer<'a> {
variant: &'static str,
len: usize,
) -> Result<Self::SerializeStructVariant, Error> {
- if self.tokens.first() == Some(&Token::Enum { name: name }) {
+ if self.tokens.first() == Some(&Token::Enum { name }) {
self.next_token();
assert_next_token!(self, Str(variant));
let len = Some(len);
diff --git a/src/token.rs b/src/token.rs
index 2251361..03b3b7b 100644
--- a/src/token.rs
+++ b/src/token.rs
@@ -4,7 +4,7 @@ use std::fmt::{self, Debug, Display};
pub enum Token {
/// A serialized `bool`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// assert_tokens(&true, &[Token::Bool(true)]);
@@ -13,7 +13,7 @@ pub enum Token {
/// A serialized `i8`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// assert_tokens(&0i8, &[Token::I8(0)]);
@@ -22,7 +22,7 @@ pub enum Token {
/// A serialized `i16`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// assert_tokens(&0i16, &[Token::I16(0)]);
@@ -31,7 +31,7 @@ pub enum Token {
/// A serialized `i32`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// assert_tokens(&0i32, &[Token::I32(0)]);
@@ -40,7 +40,7 @@ pub enum Token {
/// A serialized `i64`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// assert_tokens(&0i64, &[Token::I64(0)]);
@@ -49,7 +49,7 @@ pub enum Token {
/// A serialized `u8`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// assert_tokens(&0u8, &[Token::U8(0)]);
@@ -58,7 +58,7 @@ pub enum Token {
/// A serialized `u16`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// assert_tokens(&0u16, &[Token::U16(0)]);
@@ -67,7 +67,7 @@ pub enum Token {
/// A serialized `u32`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// assert_tokens(&0u32, &[Token::U32(0)]);
@@ -76,7 +76,7 @@ pub enum Token {
/// A serialized `u64`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// assert_tokens(&0u64, &[Token::U64(0)]);
@@ -85,7 +85,7 @@ pub enum Token {
/// A serialized `f32`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// assert_tokens(&0f32, &[Token::F32(0.0)]);
@@ -94,7 +94,7 @@ pub enum Token {
/// A serialized `f64`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// assert_tokens(&0f64, &[Token::F64(0.0)]);
@@ -103,7 +103,7 @@ pub enum Token {
/// A serialized `char`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// assert_tokens(&'\n', &[Token::Char('\n')]);
@@ -112,7 +112,7 @@ pub enum Token {
/// A serialized `str`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// let s = String::from("transient");
@@ -122,7 +122,7 @@ pub enum Token {
/// A borrowed `str`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// let s: &str = "borrowed";
@@ -132,7 +132,7 @@ pub enum Token {
/// A serialized `String`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// let s = String::from("owned");
@@ -151,7 +151,7 @@ pub enum Token {
/// A serialized `Option<T>` containing none.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// let opt = None::<char>;
@@ -163,20 +163,17 @@ pub enum Token {
///
/// The tokens of the value follow after this header.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// let opt = Some('c');
- /// assert_tokens(&opt, &[
- /// Token::Some,
- /// Token::Char('c'),
- /// ]);
+ /// assert_tokens(&opt, &[Token::Some, Token::Char('c')]);
/// ```
Some,
/// A serialized `()`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// assert_tokens(&(), &[Token::Unit]);
@@ -185,8 +182,8 @@ pub enum Token {
/// A serialized unit struct of the given name.
///
- /// ```edition2018
- /// # use serde::{Serialize, Deserialize};
+ /// ```
+ /// # use serde_derive::{Deserialize, Serialize};
/// # use serde_test::{assert_tokens, Token};
/// #
/// # fn main() {
@@ -200,8 +197,8 @@ pub enum Token {
/// A unit variant of an enum.
///
- /// ```edition2018
- /// # use serde::{Serialize, Deserialize};
+ /// ```
+ /// # use serde_derive::{Deserialize, Serialize};
/// # use serde_test::{assert_tokens, Token};
/// #
/// # fn main() {
@@ -211,7 +208,13 @@ pub enum Token {
/// }
///
/// let a = E::A;
- /// assert_tokens(&a, &[Token::UnitVariant { name: "E", variant: "A" }]);
+ /// assert_tokens(
+ /// &a,
+ /// &[Token::UnitVariant {
+ /// name: "E",
+ /// variant: "A",
+ /// }],
+ /// );
/// # }
/// ```
UnitVariant {
@@ -223,8 +226,8 @@ pub enum Token {
///
/// After this header is the value contained in the newtype struct.
///
- /// ```edition2018
- /// # use serde::{Serialize, Deserialize};
+ /// ```
+ /// # use serde_derive::{Deserialize, Serialize};
/// # use serde_test::{assert_tokens, Token};
/// #
/// # fn main() {
@@ -232,10 +235,10 @@ pub enum Token {
/// struct N(String);
///
/// let n = N("newtype".to_owned());
- /// assert_tokens(&n, &[
- /// Token::NewtypeStruct { name: "N" },
- /// Token::String("newtype"),
- /// ]);
+ /// assert_tokens(
+ /// &n,
+ /// &[Token::NewtypeStruct { name: "N" }, Token::String("newtype")],
+ /// );
/// # }
/// ```
NewtypeStruct { name: &'static str },
@@ -244,8 +247,8 @@ pub enum Token {
///
/// After this header is the value contained in the newtype variant.
///
- /// ```edition2018
- /// # use serde::{Serialize, Deserialize};
+ /// ```
+ /// # use serde_derive::{Deserialize, Serialize};
/// # use serde_test::{assert_tokens, Token};
/// #
/// # fn main() {
@@ -255,10 +258,16 @@ pub enum Token {
/// }
///
/// let b = E::B(0);
- /// assert_tokens(&b, &[
- /// Token::NewtypeVariant { name: "E", variant: "B" },
- /// Token::U8(0),
- /// ]);
+ /// assert_tokens(
+ /// &b,
+ /// &[
+ /// Token::NewtypeVariant {
+ /// name: "E",
+ /// variant: "B",
+ /// },
+ /// Token::U8(0),
+ /// ],
+ /// );
/// # }
/// ```
NewtypeVariant {
@@ -271,17 +280,20 @@ pub enum Token {
/// After this header are the elements of the sequence, followed by
/// `SeqEnd`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// let vec = vec!['a', 'b', 'c'];
- /// assert_tokens(&vec, &[
- /// Token::Seq { len: Some(3) },
- /// Token::Char('a'),
- /// Token::Char('b'),
- /// Token::Char('c'),
- /// Token::SeqEnd,
- /// ]);
+ /// assert_tokens(
+ /// &vec,
+ /// &[
+ /// Token::Seq { len: Some(3) },
+ /// Token::Char('a'),
+ /// Token::Char('b'),
+ /// Token::Char('c'),
+ /// Token::SeqEnd,
+ /// ],
+ /// );
/// ```
Seq { len: Option<usize> },
@@ -292,16 +304,19 @@ pub enum Token {
///
/// After this header are the elements of the tuple, followed by `TupleEnd`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// let tuple = ('a', 100);
- /// assert_tokens(&tuple, &[
- /// Token::Tuple { len: 2 },
- /// Token::Char('a'),
- /// Token::I32(100),
- /// Token::TupleEnd,
- /// ]);
+ /// assert_tokens(
+ /// &tuple,
+ /// &[
+ /// Token::Tuple { len: 2 },
+ /// Token::Char('a'),
+ /// Token::I32(100),
+ /// Token::TupleEnd,
+ /// ],
+ /// );
/// ```
Tuple { len: usize },
@@ -313,8 +328,8 @@ pub enum Token {
/// After this header are the fields of the tuple struct, followed by
/// `TupleStructEnd`.
///
- /// ```edition2018
- /// # use serde::{Serialize, Deserialize};
+ /// ```
+ /// # use serde_derive::{Deserialize, Serialize};
/// # use serde_test::{assert_tokens, Token};
/// #
/// # fn main() {
@@ -322,12 +337,15 @@ pub enum Token {
/// struct T(u8, u8);
///
/// let t = T(0, 0);
- /// assert_tokens(&t, &[
- /// Token::TupleStruct { name: "T", len: 2 },
- /// Token::U8(0),
- /// Token::U8(0),
- /// Token::TupleStructEnd,
- /// ]);
+ /// assert_tokens(
+ /// &t,
+ /// &[
+ /// Token::TupleStruct { name: "T", len: 2 },
+ /// Token::U8(0),
+ /// Token::U8(0),
+ /// Token::TupleStructEnd,
+ /// ],
+ /// );
/// # }
/// ```
TupleStruct { name: &'static str, len: usize },
@@ -340,8 +358,8 @@ pub enum Token {
/// After this header are the fields of the tuple variant, followed by
/// `TupleVariantEnd`.
///
- /// ```edition2018
- /// # use serde::{Serialize, Deserialize};
+ /// ```
+ /// # use serde_derive::{Deserialize, Serialize};
/// # use serde_test::{assert_tokens, Token};
/// #
/// # fn main() {
@@ -351,12 +369,19 @@ pub enum Token {
/// }
///
/// let c = E::C(0, 0);
- /// assert_tokens(&c, &[
- /// Token::TupleVariant { name: "E", variant: "C", len: 2 },
- /// Token::U8(0),
- /// Token::U8(0),
- /// Token::TupleVariantEnd,
- /// ]);
+ /// assert_tokens(
+ /// &c,
+ /// &[
+ /// Token::TupleVariant {
+ /// name: "E",
+ /// variant: "C",
+ /// len: 2,
+ /// },
+ /// Token::U8(0),
+ /// Token::U8(0),
+ /// Token::TupleVariantEnd,
+ /// ],
+ /// );
/// # }
/// ```
TupleVariant {
@@ -372,7 +397,7 @@ pub enum Token {
///
/// After this header are the entries of the map, followed by `MapEnd`.
///
- /// ```edition2018
+ /// ```
/// # use serde_test::{assert_tokens, Token};
/// #
/// use std::collections::BTreeMap;
@@ -381,14 +406,17 @@ pub enum Token {
/// map.insert('A', 65);
/// map.insert('Z', 90);
///
- /// assert_tokens(&map, &[
- /// Token::Map { len: Some(2) },
- /// Token::Char('A'),
- /// Token::I32(65),
- /// Token::Char('Z'),
- /// Token::I32(90),
- /// Token::MapEnd,
- /// ]);
+ /// assert_tokens(
+ /// &map,
+ /// &[
+ /// Token::Map { len: Some(2) },
+ /// Token::Char('A'),
+ /// Token::I32(65),
+ /// Token::Char('Z'),
+ /// Token::I32(90),
+ /// Token::MapEnd,
+ /// ],
+ /// );
/// ```
Map { len: Option<usize> },
@@ -399,8 +427,8 @@ pub enum Token {
///
/// After this header are the fields of the struct, followed by `StructEnd`.
///
- /// ```edition2018
- /// # use serde::{Serialize, Deserialize};
+ /// ```
+ /// # use serde_derive::{Deserialize, Serialize};
/// # use serde_test::{assert_tokens, Token};
/// #
/// # fn main() {
@@ -411,14 +439,17 @@ pub enum Token {
/// }
///
/// let s = S { a: 0, b: 0 };
- /// assert_tokens(&s, &[
- /// Token::Struct { name: "S", len: 2 },
- /// Token::Str("a"),
- /// Token::U8(0),
- /// Token::Str("b"),
- /// Token::U8(0),
- /// Token::StructEnd,
- /// ]);
+ /// assert_tokens(
+ /// &s,
+ /// &[
+ /// Token::Struct { name: "S", len: 2 },
+ /// Token::Str("a"),
+ /// Token::U8(0),
+ /// Token::Str("b"),
+ /// Token::U8(0),
+ /// Token::StructEnd,
+ /// ],
+ /// );
/// # }
/// ```
Struct { name: &'static str, len: usize },
@@ -431,8 +462,8 @@ pub enum Token {
/// After this header are the fields of the struct variant, followed by
/// `StructVariantEnd`.
///
- /// ```edition2018
- /// # use serde::{Serialize, Deserialize};
+ /// ```
+ /// # use serde_derive::{Deserialize, Serialize};
/// # use serde_test::{assert_tokens, Token};
/// #
/// # fn main() {
@@ -442,12 +473,19 @@ pub enum Token {
/// }
///
/// let d = E::D { d: 0 };
- /// assert_tokens(&d, &[
- /// Token::StructVariant { name: "E", variant: "D", len: 1 },
- /// Token::Str("d"),
- /// Token::U8(0),
- /// Token::StructVariantEnd,
- /// ]);
+ /// assert_tokens(
+ /// &d,
+ /// &[
+ /// Token::StructVariant {
+ /// name: "E",
+ /// variant: "D",
+ /// len: 1,
+ /// },
+ /// Token::Str("d"),
+ /// Token::U8(0),
+ /// Token::StructVariantEnd,
+ /// ],
+ /// );
/// # }
/// ```
StructVariant {
@@ -461,8 +499,8 @@ pub enum Token {
/// The header to an enum of the given name.
///
- /// ```edition2018
- /// # use serde::{Serialize, Deserialize};
+ /// ```
+ /// # use serde_derive::{Deserialize, Serialize};
/// # use serde_test::{assert_tokens, Token};
/// #
/// # fn main() {
@@ -475,38 +513,42 @@ pub enum Token {
/// }
///
/// let a = E::A;
- /// assert_tokens(&a, &[
- /// Token::Enum { name: "E" },
- /// Token::Str("A"),
- /// Token::Unit,
- /// ]);
+ /// assert_tokens(
+ /// &a,
+ /// &[Token::Enum { name: "E" }, Token::Str("A"), Token::Unit],
+ /// );
///
/// let b = E::B(0);
- /// assert_tokens(&b, &[
- /// Token::Enum { name: "E" },
- /// Token::Str("B"),
- /// Token::U8(0),
- /// ]);
+ /// assert_tokens(
+ /// &b,
+ /// &[Token::Enum { name: "E" }, Token::Str("B"), Token::U8(0)],
+ /// );
///
/// let c = E::C(0, 0);
- /// assert_tokens(&c, &[
- /// Token::Enum { name: "E" },
- /// Token::Str("C"),
- /// Token::Seq { len: Some(2) },
- /// Token::U8(0),
- /// Token::U8(0),
- /// Token::SeqEnd,
- /// ]);
+ /// assert_tokens(
+ /// &c,
+ /// &[
+ /// Token::Enum { name: "E" },
+ /// Token::Str("C"),
+ /// Token::Seq { len: Some(2) },
+ /// Token::U8(0),
+ /// Token::U8(0),
+ /// Token::SeqEnd,
+ /// ],
+ /// );
///
/// let d = E::D { d: 0 };
- /// assert_tokens(&d, &[
- /// Token::Enum { name: "E" },
- /// Token::Str("D"),
- /// Token::Map { len: Some(1) },
- /// Token::Str("d"),
- /// Token::U8(0),
- /// Token::MapEnd,
- /// ]);
+ /// assert_tokens(
+ /// &d,
+ /// &[
+ /// Token::Enum { name: "E" },
+ /// Token::Str("D"),
+ /// Token::Map { len: Some(1) },
+ /// Token::Str("d"),
+ /// Token::U8(0),
+ /// Token::MapEnd,
+ /// ],
+ /// );
/// # }
/// ```
Enum { name: &'static str },