More fixes around unversioned v0, more tests, move tests to separate file

This commit is contained in:
xenofem 2022-08-16 04:37:35 -04:00
parent 14fd399403
commit b8d31b77d9
3 changed files with 444 additions and 342 deletions
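In short: `SchemaV0::EXPECT_VERSION_NUMBER` is replaced by `VERSION_OPTIONAL` (inverted polarity), and the unversioned-v0 fallback now propagates up the whole schema chain via the new `Schema::UNVERSIONED_V0` constant, instead of only applying when parsing directly as V0. A minimal sketch of the new opt-in from the caller's side, using a hypothetical `MyData` type (not from this commit):

use serde::{Deserialize, Serialize};

// Hypothetical pre-JsonDb data type; files written before adopting
// JsonDb have no "version" field at all.
#[derive(Debug, Default, Deserialize, Serialize)]
struct MyData {
    name: String,
}

impl jsondb::SchemaV0 for MyData {
    // 0.3.2 spelled this opt-out as `EXPECT_VERSION_NUMBER = false`;
    // 0.4.0 inverts the polarity and renames it.
    const VERSION_OPTIONAL: bool = true;
}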

Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "jsondb"
-version = "0.3.2"
+version = "0.4.0"
 edition = "2021"
 authors = ["xenofem <xenofem@xeno.science>"]
 license = "MIT"

src/lib.rs

@@ -37,6 +37,9 @@ use tokio::{
     sync::{mpsc, oneshot, OwnedRwLockReadGuard, OwnedRwLockWriteGuard, RwLock},
 };
 
+#[cfg(test)]
+mod tests;
+
 /// A JSON-backed “““database”””.
 ///
 /// This wraps a value that is loaded from a JSON file, automatically
@@ -64,14 +67,27 @@ enum Request<T> {
 /// other fields of the corresponding schema version; earlier versions
 /// will be migrated to the current version automatically.
 pub trait Schema: Send + Sync + Debug + DeserializeOwned + Serialize + 'static {
-    /// Previous schema that can be migrated into the new schema
+    /// Previous schema that can be migrated into the new schema.
     type Prev: Schema + Into<Self>;
 
-    /// Schema version number
+    /// Schema version number.
     const VERSION: u32 = Self::Prev::VERSION + 1;
 
+    /// Whether unversioned data should be parsed as V0, rather than
+    /// rejected with an error.
+    const UNVERSIONED_V0: bool = Self::Prev::UNVERSIONED_V0;
+
     fn parse(s: &str) -> Result<Self, Error> {
-        let Version { version } = serde_json::from_str(s)?;
+        let version = match serde_json::from_str::<Version>(s)?.version {
+            Some(v) => v,
+            None => {
+                if Self::UNVERSIONED_V0 {
+                    0
+                } else {
+                    return Err(Error::MissingVersion);
+                }
+            }
+        };
         match version.cmp(&Self::VERSION) {
             Ordering::Less => Ok(Self::Prev::parse(s)?.into()),
             Ordering::Equal => Ok(serde_json::from_str::<VersionedData<Self>>(s)?.data),
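To see what the new fallback buys: with the version field now optional, an unversioned document is treated as version 0 whenever `UNVERSIONED_V0` is set, and since that constant defaults to `Self::Prev::UNVERSIONED_V0`, it is inherited by every later schema version. A self-contained sketch under assumed type names (`NotesV0`/`NotesV1` are illustrative, not from this commit):

use serde::{Deserialize, Serialize};
use jsondb::{Schema, SchemaV0};

#[derive(Debug, Default, Deserialize, Serialize)]
struct NotesV0 {
    body: String,
}

impl SchemaV0 for NotesV0 {
    // Accept pre-JsonDb files that have no "version" field.
    const VERSION_OPTIONAL: bool = true;
}

#[derive(Debug, Deserialize, Serialize)]
struct NotesV1 {
    body: String,
    pinned: bool,
}

impl From<NotesV0> for NotesV1 {
    fn from(old: NotesV0) -> Self {
        NotesV1 { body: old.body, pinned: false }
    }
}

impl Schema for NotesV1 {
    type Prev = NotesV0;
}

#[test]
fn unversioned_input_migrates_up_the_chain() {
    // No "version" field: treated as version 0 (NotesV1 inherits
    // UNVERSIONED_V0 through Self::Prev), then migrated V0 -> V1.
    let v1 = NotesV1::parse(r#"{"body":"hi"}"#).unwrap();
    assert_eq!(v1.body, "hi");
    assert!(!v1.pinned);
}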
@@ -85,36 +101,28 @@ pub trait Schema: Send + Sync + Debug + DeserializeOwned + Serialize + 'static {
 /// Implementing this will automatically implement [`Schema`], with
 /// version number `0` and `Self` as the previous version.
 pub trait SchemaV0: Send + Sync + Debug + DeserializeOwned + Serialize + 'static {
-    /// Set this to false if your version 0 is a pre-`JsonDb` schema
-    /// that does not include a version number.
-    const EXPECT_VERSION_NUMBER: bool = true;
+    /// Set this to `true` if your version 0 data may be stored in a
+    /// pre-`JsonDb` format that does not include a version number.
+    /// Note that regardless of this setting, when data is written
+    /// back to the JSON file, it will always include a version
+    /// number.
+    const VERSION_OPTIONAL: bool = false;
 }
 
 impl<T: SchemaV0> Schema for T {
     type Prev = Self;
     const VERSION: u32 = 0;
-
-    fn parse(s: &str) -> Result<Self, Error> {
-        if Self::EXPECT_VERSION_NUMBER {
-            let Version { version } = serde_json::from_str(s)?;
-            if version != 0 {
-                return Err(Error::UnknownVersion(version));
-            }
-            Ok(serde_json::from_str::<VersionedData<Self>>(s)?.data)
-        } else {
-            Ok(serde_json::from_str(s)?)
-        }
-    }
+    const UNVERSIONED_V0: bool = Self::VERSION_OPTIONAL;
 }
 
 #[derive(Deserialize)]
 struct Version {
-    version: u32,
+    version: Option<u32>,
 }
 
 #[derive(Deserialize, Serialize)]
 struct VersionedData<T> {
-    version: u32,
+    version: Option<u32>,
     #[serde(flatten)]
     data: T,
 }
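Note the asymmetry: `version` became `Option<u32>` only on the parsing side. `save` always writes `Some(T::VERSION)` (see the hunk below), so a file loaded without a version number gains one on the next flush. A sketch of the serialized shape, using a local stand-in since the real `VersionedData<T>` is crate-private:

use serde::Serialize;

// Stand-in mirroring the crate-private `VersionedData<T>` shape.
#[derive(Serialize)]
struct Versioned<T> {
    version: Option<u32>,
    #[serde(flatten)]
    data: T,
}

#[derive(Serialize)]
struct Data {
    hello: String,
}

#[test]
fn saved_files_always_get_a_version_field() {
    // save() always populates the version, so None is never written.
    let json = serde_json::to_string(&Versioned {
        version: Some(0),
        data: Data { hello: "world".into() },
    })
    .unwrap();
    assert_eq!(json, r#"{"version":0,"hello":"world"}"#);
}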
@@ -128,6 +136,8 @@ pub enum Error {
     Json(#[from] serde_json::Error),
     #[error("Unknown schema version {0}")]
     UnknownVersion(u32),
+    #[error("Missing schema version")]
+    MissingVersion,
 }
 
 impl<T: Schema + Default> JsonDb<T> {
@@ -148,7 +158,7 @@ async fn save<T: Schema>(data: &T, path: &Path) -> Result<(), Error> {
     let mut temp_file = File::create(&temp_file_path).await?;
     temp_file
         .write_all(&serde_json::to_vec_pretty(&VersionedData {
-            version: T::VERSION,
+            version: Some(T::VERSION),
             data,
         })?)
         .await?;
@@ -296,323 +306,3 @@ impl<T: Schema> JsonDb<T> {
             .expect("Failed to receive flush confirmation");
     }
 }
#[cfg(test)]
mod tests {
use std::{collections::HashMap, fs::File};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use serde_with::serde_as;
use tempfile::tempdir;
use time::OffsetDateTime;
use super::{JsonDb, Schema, SchemaV0};
#[derive(Default, Debug, PartialEq, Eq, Deserialize, Serialize)]
struct V0 {
name: String,
}
impl SchemaV0 for V0 {}
#[derive(Default, Debug, PartialEq, Eq, Deserialize, Serialize)]
struct V1 {
name: String,
#[serde(default)]
gender: Option<String>,
last_updated: i64,
}
impl Schema for V1 {
type Prev = V0;
}
impl From<V0> for V1 {
fn from(old: V0) -> Self {
V1 {
name: old.name,
gender: None,
last_updated: 0,
}
}
}
#[serde_as]
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct V2 {
name: String,
#[serde(default)]
gender: Option<String>,
#[serde_as(as = "time::format_description::well_known::Rfc3339")]
last_updated: OffsetDateTime,
}
impl Default for V2 {
fn default() -> Self {
V2 {
name: String::new(),
gender: None,
last_updated: OffsetDateTime::UNIX_EPOCH,
}
}
}
impl Schema for V2 {
type Prev = V1;
}
impl From<V1> for V2 {
fn from(old: V1) -> Self {
V2 {
name: old.name,
gender: old.gender,
last_updated: OffsetDateTime::from_unix_timestamp(old.last_updated).unwrap(),
}
}
}
const V0DATA: &str = r#"{"version":0,"name":"xenofem"}"#;
const V1DATA: &str =
r#"{"version":1,"name":"xenofem","gender":"dress go spinny","last_updated":1660585235}"#;
const V2DATA: &str = r#"{"version":2,"name":"xenofem","gender":"dress go spinny","last_updated":"2022-08-15T17:47:18Z"}"#;
#[test]
fn parse_v0() {
assert_eq!(
V0::parse(V0DATA).unwrap(),
V0 {
name: String::from("xenofem")
},
);
}
#[test]
fn parse_v1() {
assert_eq!(
V1::parse(V1DATA).unwrap(),
V1 {
name: String::from("xenofem"),
gender: Some(String::from("dress go spinny")),
last_updated: 1660585235
},
);
}
#[test]
fn migrate_v0_v1() {
assert_eq!(
V1::parse(V0DATA).unwrap(),
V1 {
name: String::from("xenofem"),
gender: None,
last_updated: 0
},
);
}
#[test]
fn parse_v2() {
assert_eq!(
V2::parse(V2DATA).unwrap(),
V2 {
name: String::from("xenofem"),
gender: Some(String::from("dress go spinny")),
last_updated: OffsetDateTime::from_unix_timestamp(1660585638).unwrap(),
},
);
}
#[test]
fn migrate_v1_v2() {
assert_eq!(
V2::parse(V1DATA).unwrap(),
V2 {
name: String::from("xenofem"),
gender: Some(String::from("dress go spinny")),
last_updated: time::macros::datetime!(2022-08-15 17:40:35 +00:00)
},
);
}
#[test]
fn migrate_v0_v2() {
assert_eq!(
V2::parse(V0DATA).unwrap(),
V2 {
name: String::from("xenofem"),
gender: None,
last_updated: time::macros::datetime!(1970-01-01 00:00:00 +00:00)
},
);
}
#[tokio::test]
async fn async_load_write_migrate() {
let dir = tempdir().unwrap();
let db_file = dir.path().join("test.json");
{
let db0: JsonDb<V0> = JsonDb::load(db_file.clone()).await.unwrap();
db0.flush().await;
let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
assert_eq!(value["version"], 0);
assert_eq!(&value["name"], "");
{
let mut writer = db0.write().await;
writer.name = String::from("mefonex");
}
{
let reader = db0.read().await;
assert_eq!(reader.name, "mefonex");
}
// Reading also awaits a flush
let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
assert_eq!(&value["name"], "mefonex");
}
{
let db2: JsonDb<V2> = JsonDb::load(db_file.clone()).await.unwrap();
db2.flush().await;
let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
assert_eq!(value["version"], 2);
assert_eq!(&value["name"], "mefonex");
assert_eq!(value["gender"], Value::Null);
assert_eq!(&value["last_updated"], "1970-01-01T00:00:00Z");
{
let mut writer = db2.write().await;
writer.last_updated = OffsetDateTime::from_unix_timestamp(1660585638).unwrap();
}
db2.flush().await;
let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
assert_eq!(&value["last_updated"], "2022-08-15T17:47:18Z");
}
}
#[test]
fn blocking_load_write_migrate() {
let rt = tokio::runtime::Runtime::new().unwrap();
let dir = tempdir().unwrap();
let db_file = dir.path().join("test.json");
{
let db0: JsonDb<V0> = rt.block_on(JsonDb::load(db_file.clone())).unwrap();
db0.blocking_flush();
let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
assert_eq!(value["version"], 0);
assert_eq!(&value["name"], "");
{
let mut writer = db0.blocking_write();
writer.name = String::from("mefonex");
}
{
let reader = db0.blocking_read();
assert_eq!(reader.name, "mefonex");
}
// Reading also waits for a flush
let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
assert_eq!(&value["name"], "mefonex");
}
{
let db2: JsonDb<V2> = rt.block_on(JsonDb::load(db_file.clone())).unwrap();
db2.blocking_flush();
let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
assert_eq!(value["version"], 2);
assert_eq!(&value["name"], "mefonex");
assert_eq!(value["gender"], Value::Null);
assert_eq!(&value["last_updated"], "1970-01-01T00:00:00Z");
{
let mut writer = db2.blocking_write();
writer.last_updated = OffsetDateTime::from_unix_timestamp(1660585638).unwrap();
}
db2.blocking_flush();
let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
assert_eq!(&value["last_updated"], "2022-08-15T17:47:18Z");
}
}
#[test]
fn catchall_schema_v0() {
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct Catchall(HashMap<String, String>);
impl SchemaV0 for Catchall {}
let mut data = HashMap::new();
data.insert("hello".into(), "world".into());
data.insert("catch".into(), "all".into());
assert_eq!(
Catchall::parse(r#"{"version":0,"hello":"world","catch":"all"}"#).unwrap(),
Catchall(data)
)
}
#[test]
fn catchall_schema_v1_v2() {
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct Catchall0(HashMap<String, String>);
impl SchemaV0 for Catchall0 {}
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct Catchall1 {
count: usize,
#[serde(flatten)]
data: HashMap<String, String>,
}
impl Schema for Catchall1 {
type Prev = Catchall0;
}
impl From<Catchall0> for Catchall1 {
fn from(old: Catchall0) -> Self {
Catchall1 {
data: old.0,
count: 0,
}
}
}
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct Catchall2 {
count: usize,
name: String,
#[serde(flatten)]
data: HashMap<String, String>,
}
impl Schema for Catchall2 {
type Prev = Catchall1;
}
impl From<Catchall1> for Catchall2 {
fn from(old: Catchall1) -> Self {
Catchall2 {
data: old.data,
count: old.count,
name: String::new(),
}
}
}
let mut data = HashMap::new();
data.insert("hello".into(), "world".into());
data.insert("catch".into(), "all".into());
assert_eq!(
Catchall2::parse(r#"{"version":1,"count":42,"hello":"world","catch":"all"}"#).unwrap(),
Catchall2 {
data,
count: 42,
name: String::new(),
},
)
}
#[test]
fn unversioned() {
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct Data {
number: usize,
truth: bool,
}
impl SchemaV0 for Data {
const EXPECT_VERSION_NUMBER: bool = false;
}
assert_eq!(
Data::parse(r#"{"number":42,"truth":true}"#).unwrap(),
Data {
number: 42,
truth: true,
},
);
}
}

src/tests.rs Normal file

@@ -0,0 +1,412 @@
use std::{collections::HashMap, fs::File};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use serde_with::serde_as;
use tempfile::tempdir;
use time::OffsetDateTime;
use super::{JsonDb, Schema, SchemaV0};
#[derive(Default, Debug, PartialEq, Eq, Deserialize, Serialize)]
struct V0 {
name: String,
}
impl SchemaV0 for V0 {}
#[derive(Default, Debug, PartialEq, Eq, Deserialize, Serialize)]
struct V1 {
name: String,
#[serde(default)]
gender: Option<String>,
last_updated: i64,
}
impl Schema for V1 {
type Prev = V0;
}
impl From<V0> for V1 {
fn from(old: V0) -> Self {
V1 {
name: old.name,
gender: None,
last_updated: 0,
}
}
}
#[serde_as]
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct V2 {
name: String,
#[serde(default)]
gender: Option<String>,
#[serde_as(as = "time::format_description::well_known::Rfc3339")]
last_updated: OffsetDateTime,
}
impl Default for V2 {
fn default() -> Self {
V2 {
name: String::new(),
gender: None,
last_updated: OffsetDateTime::UNIX_EPOCH,
}
}
}
impl Schema for V2 {
type Prev = V1;
}
impl From<V1> for V2 {
fn from(old: V1) -> Self {
V2 {
name: old.name,
gender: old.gender,
last_updated: OffsetDateTime::from_unix_timestamp(old.last_updated).unwrap(),
}
}
}
const V0DATA: &str = r#"{"version":0,"name":"xenofem"}"#;
const V1DATA: &str =
r#"{"version":1,"name":"xenofem","gender":"dress go spinny","last_updated":1660585235}"#;
const V2DATA: &str = r#"{"version":2,"name":"xenofem","gender":"dress go spinny","last_updated":"2022-08-15T17:47:18Z"}"#;
#[test]
fn parse_v0() {
assert_eq!(
V0::parse(V0DATA).unwrap(),
V0 {
name: String::from("xenofem")
},
);
}
#[test]
fn parse_v1() {
assert_eq!(
V1::parse(V1DATA).unwrap(),
V1 {
name: String::from("xenofem"),
gender: Some(String::from("dress go spinny")),
last_updated: 1660585235
},
);
}
#[test]
fn migrate_v0_v1() {
assert_eq!(
V1::parse(V0DATA).unwrap(),
V1 {
name: String::from("xenofem"),
gender: None,
last_updated: 0
},
);
}
#[test]
fn parse_v2() {
assert_eq!(
V2::parse(V2DATA).unwrap(),
V2 {
name: String::from("xenofem"),
gender: Some(String::from("dress go spinny")),
last_updated: OffsetDateTime::from_unix_timestamp(1660585638).unwrap(),
},
);
}
#[test]
fn migrate_v1_v2() {
assert_eq!(
V2::parse(V1DATA).unwrap(),
V2 {
name: String::from("xenofem"),
gender: Some(String::from("dress go spinny")),
last_updated: time::macros::datetime!(2022-08-15 17:40:35 +00:00)
},
);
}
#[test]
fn migrate_v0_v2() {
assert_eq!(
V2::parse(V0DATA).unwrap(),
V2 {
name: String::from("xenofem"),
gender: None,
last_updated: time::macros::datetime!(1970-01-01 00:00:00 +00:00)
},
);
}
#[tokio::test]
async fn async_load_write_migrate() {
let dir = tempdir().unwrap();
let db_file = dir.path().join("test.json");
{
let db0: JsonDb<V0> = JsonDb::load(db_file.clone()).await.unwrap();
db0.flush().await;
let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
assert_eq!(value["version"], 0);
assert_eq!(&value["name"], "");
{
let mut writer = db0.write().await;
writer.name = String::from("mefonex");
}
{
let reader = db0.read().await;
assert_eq!(reader.name, "mefonex");
}
// Reading also awaits a flush
let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
assert_eq!(&value["name"], "mefonex");
}
{
let db2: JsonDb<V2> = JsonDb::load(db_file.clone()).await.unwrap();
db2.flush().await;
let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
assert_eq!(value["version"], 2);
assert_eq!(&value["name"], "mefonex");
assert_eq!(value["gender"], Value::Null);
assert_eq!(&value["last_updated"], "1970-01-01T00:00:00Z");
{
let mut writer = db2.write().await;
writer.last_updated = OffsetDateTime::from_unix_timestamp(1660585638).unwrap();
}
db2.flush().await;
let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
assert_eq!(&value["last_updated"], "2022-08-15T17:47:18Z");
}
}
#[test]
fn blocking_load_write_migrate() {
let rt = tokio::runtime::Runtime::new().unwrap();
let dir = tempdir().unwrap();
let db_file = dir.path().join("test.json");
{
let db0: JsonDb<V0> = rt.block_on(JsonDb::load(db_file.clone())).unwrap();
db0.blocking_flush();
let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
assert_eq!(value["version"], 0);
assert_eq!(&value["name"], "");
{
let mut writer = db0.blocking_write();
writer.name = String::from("mefonex");
}
{
let reader = db0.blocking_read();
assert_eq!(reader.name, "mefonex");
}
// Reading also waits for a flush
let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
assert_eq!(&value["name"], "mefonex");
}
{
let db2: JsonDb<V2> = rt.block_on(JsonDb::load(db_file.clone())).unwrap();
db2.blocking_flush();
let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
assert_eq!(value["version"], 2);
assert_eq!(&value["name"], "mefonex");
assert_eq!(value["gender"], Value::Null);
assert_eq!(&value["last_updated"], "1970-01-01T00:00:00Z");
{
let mut writer = db2.blocking_write();
writer.last_updated = OffsetDateTime::from_unix_timestamp(1660585638).unwrap();
}
db2.blocking_flush();
let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
assert_eq!(&value["last_updated"], "2022-08-15T17:47:18Z");
}
}
#[test]
fn catchall_schema_v0() {
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct Catchall(HashMap<String, String>);
impl SchemaV0 for Catchall {}
let mut data = HashMap::new();
data.insert("hello".into(), "world".into());
data.insert("catch".into(), "all".into());
assert_eq!(
Catchall::parse(r#"{"version":0,"hello":"world","catch":"all"}"#).unwrap(),
Catchall(data)
)
}
#[test]
fn catchall_schema_v1_v2() {
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct Catchall0(HashMap<String, String>);
impl SchemaV0 for Catchall0 {}
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct Catchall1 {
count: usize,
#[serde(flatten)]
data: HashMap<String, String>,
}
impl Schema for Catchall1 {
type Prev = Catchall0;
}
impl From<Catchall0> for Catchall1 {
fn from(old: Catchall0) -> Self {
Catchall1 {
data: old.0,
count: 0,
}
}
}
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct Catchall2 {
count: usize,
name: String,
#[serde(flatten)]
data: HashMap<String, String>,
}
impl Schema for Catchall2 {
type Prev = Catchall1;
}
impl From<Catchall1> for Catchall2 {
fn from(old: Catchall1) -> Self {
Catchall2 {
data: old.data,
count: old.count,
name: String::new(),
}
}
}
let mut data = HashMap::new();
data.insert("hello".into(), "world".into());
data.insert("catch".into(), "all".into());
assert_eq!(
Catchall2::parse(r#"{"version":1,"count":42,"hello":"world","catch":"all"}"#).unwrap(),
Catchall2 {
data,
count: 42,
name: String::new(),
},
)
}
#[test]
fn unversioned() {
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct Data {
number: usize,
truth: bool,
}
impl SchemaV0 for Data {
const VERSION_OPTIONAL: bool = true;
}
assert_eq!(
Data::parse(r#"{"number":42,"truth":true}"#).unwrap(),
Data {
number: 42,
truth: true,
},
);
}
#[test]
fn catchall_v0_with_optional_version_number_present() {
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct Catchall(HashMap<String, String>);
impl SchemaV0 for Catchall {
const VERSION_OPTIONAL: bool = true;
}
let mut data = HashMap::new();
data.insert("hello".into(), "world".into());
data.insert("catch".into(), "all".into());
assert_eq!(
Catchall::parse(r#"{"version":0,"hello":"world","catch":"all"}"#).unwrap(),
Catchall(data)
)
}
#[test]
fn catchall_v0_with_optional_version_number_absent() {
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct Catchall(HashMap<String, String>);
impl SchemaV0 for Catchall {
const VERSION_OPTIONAL: bool = true;
}
let mut data = HashMap::new();
data.insert("hello".into(), "world".into());
data.insert("catch".into(), "all".into());
assert_eq!(
Catchall::parse(r#"{"hello":"world","catch":"all"}"#).unwrap(),
Catchall(data)
)
}
#[test]
fn unversioned_v0_migration() {
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct Catchall0(HashMap<String, String>);
impl SchemaV0 for Catchall0 {
const VERSION_OPTIONAL: bool = true;
}
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct Catchall1 {
count: usize,
#[serde(flatten)]
data: HashMap<String, String>,
}
impl Schema for Catchall1 {
type Prev = Catchall0;
}
impl From<Catchall0> for Catchall1 {
fn from(old: Catchall0) -> Self {
Catchall1 {
data: old.0,
count: 0,
}
}
}
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct Catchall2 {
count: usize,
name: String,
#[serde(flatten)]
data: HashMap<String, String>,
}
impl Schema for Catchall2 {
type Prev = Catchall1;
}
impl From<Catchall1> for Catchall2 {
fn from(old: Catchall1) -> Self {
Catchall2 {
data: old.data,
count: old.count,
name: String::new(),
}
}
}
let mut data = HashMap::new();
data.insert("hello".into(), "world".into());
data.insert("catch".into(), "all".into());
assert_eq!(
Catchall2::parse(r#"{"hello":"world","catch":"all"}"#).unwrap(),
Catchall2 {
data,
count: 0,
name: String::new(),
},
)
}