//! jsondb/src/tests.rs — unit tests for `JsonDb` schema versioning,
//! migration between schema versions, and load/write/flush behavior.

use std::{collections::HashMap, fs::File};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use serde_with::serde_as;
use tempfile::tempdir;
use time::OffsetDateTime;
use super::{JsonDb, Schema, SchemaV0};
/// Version 0 of the test schema: just a name.
#[derive(Default, Debug, PartialEq, Eq, Deserialize, Serialize)]
struct V0 {
    name: String,
}

// Marks V0 as the root of the migration chain.
impl SchemaV0 for V0 {}
/// Version 1: adds an optional gender and a unix-seconds timestamp.
#[derive(Default, Debug, PartialEq, Eq, Deserialize, Serialize)]
struct V1 {
    name: String,
    // Absent in older documents, so default to None when deserializing.
    #[serde(default)]
    gender: Option<String>,
    // Unix timestamp in whole seconds (becomes an RFC 3339 string in V2).
    last_updated: i64,
}

// V1's predecessor in the migration chain is V0.
impl Schema for V1 {
    type Prev = V0;
}
impl From<V0> for V1 {
fn from(old: V0) -> Self {
V1 {
name: old.name,
gender: None,
last_updated: 0,
}
}
}
/// Version 2: the timestamp becomes a proper `OffsetDateTime`.
#[serde_as]
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
struct V2 {
    name: String,
    #[serde(default)]
    gender: Option<String>,
    // serde_with adapter: (de)serialize the datetime as an RFC 3339 string.
    #[serde_as(as = "time::format_description::well_known::Rfc3339")]
    last_updated: OffsetDateTime,
}
/// Hand-written `Default` (needed because `OffsetDateTime` does not
/// implement it): empty name, no gender, timestamp at the unix epoch.
impl Default for V2 {
    fn default() -> Self {
        Self {
            name: String::default(),
            gender: Option::default(),
            last_updated: OffsetDateTime::UNIX_EPOCH,
        }
    }
}
// V2 is the latest schema version; its predecessor is V1.
impl Schema for V2 {
    type Prev = V1;
}
impl From<V1> for V2 {
fn from(old: V1) -> Self {
V2 {
name: old.name,
gender: old.gender,
last_updated: OffsetDateTime::from_unix_timestamp(old.last_updated).unwrap(),
}
}
}
// Serialized fixtures, one per schema version. The V1 unix timestamp
// 1660585235 corresponds to 2022-08-15T17:40:35Z; V2 stores RFC 3339
// strings directly.
const V0DATA: &str = r#"{"version":0,"name":"xenofem"}"#;
const V1DATA: &str =
    r#"{"version":1,"name":"xenofem","gender":"dress go spinny","last_updated":1660585235}"#;
const V2DATA: &str = r#"{"version":2,"name":"xenofem","gender":"dress go spinny","last_updated":"2022-08-15T17:47:18Z"}"#;
/// A V0 document parses into the matching V0 value.
#[test]
fn parse_v0() {
    let parsed = V0::parse(V0DATA).unwrap();
    let expected = V0 {
        name: "xenofem".to_owned(),
    };
    assert_eq!(parsed, expected);
}
/// A V1 document parses into the matching V1 value with all fields set.
#[test]
fn parse_v1() {
    let parsed = V1::parse(V1DATA).unwrap();
    let expected = V1 {
        name: "xenofem".to_owned(),
        gender: Some("dress go spinny".to_owned()),
        last_updated: 1660585235,
    };
    assert_eq!(parsed, expected);
}
/// Parsing a V0 document as V1 applies the V0 -> V1 migration, filling
/// the new fields with their migration defaults.
#[test]
fn migrate_v0_v1() {
    let migrated = V1::parse(V0DATA).unwrap();
    let expected = V1 {
        name: "xenofem".to_owned(),
        gender: None,
        last_updated: 0,
    };
    assert_eq!(migrated, expected);
}
/// A V2 document parses directly; the RFC 3339 string becomes the
/// equivalent `OffsetDateTime` (1660585638 == 2022-08-15T17:47:18Z).
#[test]
fn parse_v2() {
    let parsed = V2::parse(V2DATA).unwrap();
    let expected = V2 {
        name: "xenofem".to_owned(),
        gender: Some("dress go spinny".to_owned()),
        last_updated: OffsetDateTime::from_unix_timestamp(1660585638).unwrap(),
    };
    assert_eq!(parsed, expected);
}
/// Parsing a V1 document as V2 converts the unix-seconds timestamp to
/// an `OffsetDateTime` via the V1 -> V2 migration.
#[test]
fn migrate_v1_v2() {
    let migrated = V2::parse(V1DATA).unwrap();
    let expected = V2 {
        name: "xenofem".to_owned(),
        gender: Some("dress go spinny".to_owned()),
        // 1660585235 seconds after the unix epoch.
        last_updated: time::macros::datetime!(2022-08-15 17:40:35 +00:00),
    };
    assert_eq!(migrated, expected);
}
/// Parsing a V0 document as V2 chains both migrations:
/// V0 -> V1 sets last_updated to 0, V1 -> V2 turns that into the epoch.
#[test]
fn migrate_v0_v2() {
    let migrated = V2::parse(V0DATA).unwrap();
    let expected = V2 {
        name: "xenofem".to_owned(),
        gender: None,
        last_updated: OffsetDateTime::UNIX_EPOCH,
    };
    assert_eq!(migrated, expected);
}
/// End-to-end async flow: create a fresh V0 database file, mutate it
/// through the write guard, then reopen the same file as V2 and check
/// the stored document was migrated on load.
#[tokio::test]
async fn async_load_write_migrate() {
    let dir = tempdir().unwrap();
    let db_file = dir.path().join("test.json");
    {
        // Loading a nonexistent path yields a default V0 (empty name),
        // as the assertions below demonstrate.
        let db0: JsonDb<V0> = JsonDb::load(db_file.clone()).await.unwrap();
        db0.flush().await;
        let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
        assert_eq!(value["version"], 0);
        assert_eq!(&value["name"], "");
        {
            // Mutation happens through the write guard; the change is
            // persisted after the guard is dropped.
            let mut writer = db0.write().await;
            writer.name = String::from("mefonex");
        }
        {
            let reader = db0.read().await;
            assert_eq!(reader.name, "mefonex");
        }
        // Reading also awaits a flush
        let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
        assert_eq!(&value["name"], "mefonex");
    }
    {
        // Reopening the same file as V2 runs the V0 -> V1 -> V2 chain:
        // gender defaults to null, last_updated to the unix epoch.
        let db2: JsonDb<V2> = JsonDb::load(db_file.clone()).await.unwrap();
        db2.flush().await;
        let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
        assert_eq!(value["version"], 2);
        assert_eq!(&value["name"], "mefonex");
        assert_eq!(value["gender"], Value::Null);
        assert_eq!(&value["last_updated"], "1970-01-01T00:00:00Z");
        {
            let mut writer = db2.write().await;
            writer.last_updated = OffsetDateTime::from_unix_timestamp(1660585638).unwrap();
        }
        // Explicit flush so the change is on disk before re-reading the file.
        db2.flush().await;
        let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
        assert_eq!(&value["last_updated"], "2022-08-15T17:47:18Z");
    }
}
/// Same scenario as `async_load_write_migrate`, but driven through the
/// blocking (`blocking_*`) API from a synchronous test.
#[test]
fn blocking_load_write_migrate() {
    // `load` itself is async, so a runtime is still needed to open the db.
    let rt = tokio::runtime::Runtime::new().unwrap();
    let dir = tempdir().unwrap();
    let db_file = dir.path().join("test.json");
    {
        let db0: JsonDb<V0> = rt.block_on(JsonDb::load(db_file.clone())).unwrap();
        db0.blocking_flush();
        let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
        assert_eq!(value["version"], 0);
        assert_eq!(&value["name"], "");
        {
            // Blocking write guard; the change is persisted after drop.
            let mut writer = db0.blocking_write();
            writer.name = String::from("mefonex");
        }
        {
            let reader = db0.blocking_read();
            assert_eq!(reader.name, "mefonex");
        }
        // Reading also waits for a flush
        let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
        assert_eq!(&value["name"], "mefonex");
    }
    {
        // Reopening as V2 migrates the stored V0 document on load.
        let db2: JsonDb<V2> = rt.block_on(JsonDb::load(db_file.clone())).unwrap();
        db2.blocking_flush();
        let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
        assert_eq!(value["version"], 2);
        assert_eq!(&value["name"], "mefonex");
        assert_eq!(value["gender"], Value::Null);
        assert_eq!(&value["last_updated"], "1970-01-01T00:00:00Z");
        {
            let mut writer = db2.blocking_write();
            writer.last_updated = OffsetDateTime::from_unix_timestamp(1660585638).unwrap();
        }
        db2.blocking_flush();
        let value: Value = serde_json::from_reader(File::open(&db_file).unwrap()).unwrap();
        assert_eq!(&value["last_updated"], "2022-08-15T17:47:18Z");
    }
}
/// A V0 schema can be a catch-all map newtype: every key except
/// "version" lands in the map.
#[test]
fn catchall_schema_v0() {
    #[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
    struct Catchall(HashMap<String, String>);
    impl SchemaV0 for Catchall {}

    let expected: HashMap<String, String> = [("hello", "world"), ("catch", "all")]
        .iter()
        .map(|&(k, v)| (k.to_owned(), v.to_owned()))
        .collect();
    assert_eq!(
        Catchall::parse(r#"{"version":0,"hello":"world","catch":"all"}"#).unwrap(),
        Catchall(expected)
    )
}
/// Migration works for catch-all schemas too: unknown keys flow through
/// the `#[serde(flatten)]` maps across versions.
#[test]
fn catchall_schema_v1_v2() {
    #[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
    struct Catchall0(HashMap<String, String>);
    impl SchemaV0 for Catchall0 {}

    #[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
    struct Catchall1 {
        count: usize,
        #[serde(flatten)]
        data: HashMap<String, String>,
    }
    impl Schema for Catchall1 {
        type Prev = Catchall0;
    }
    impl From<Catchall0> for Catchall1 {
        fn from(prev: Catchall0) -> Self {
            Self {
                count: 0,
                data: prev.0,
            }
        }
    }

    #[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
    struct Catchall2 {
        count: usize,
        name: String,
        #[serde(flatten)]
        data: HashMap<String, String>,
    }
    impl Schema for Catchall2 {
        type Prev = Catchall1;
    }
    impl From<Catchall1> for Catchall2 {
        fn from(prev: Catchall1) -> Self {
            Self {
                count: prev.count,
                name: String::new(),
                data: prev.data,
            }
        }
    }

    let expected: HashMap<String, String> = [("hello", "world"), ("catch", "all")]
        .iter()
        .map(|&(k, v)| (k.to_owned(), v.to_owned()))
        .collect();
    assert_eq!(
        Catchall2::parse(r#"{"version":1,"count":42,"hello":"world","catch":"all"}"#).unwrap(),
        Catchall2 {
            count: 42,
            name: String::new(),
            data: expected,
        },
    )
}
/// With `VERSION_OPTIONAL`, a document with no "version" key still
/// parses as V0.
#[test]
fn unversioned() {
    #[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
    struct Data {
        number: usize,
        truth: bool,
    }
    impl SchemaV0 for Data {
        const VERSION_OPTIONAL: bool = true;
    }

    let parsed = Data::parse(r#"{"number":42,"truth":true}"#).unwrap();
    let expected = Data {
        number: 42,
        truth: true,
    };
    assert_eq!(parsed, expected);
}
/// An optional-version catch-all still accepts a document that DOES
/// carry an explicit "version": 0 key.
#[test]
fn catchall_v0_with_optional_version_number_present() {
    #[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
    struct Catchall(HashMap<String, String>);
    impl SchemaV0 for Catchall {
        const VERSION_OPTIONAL: bool = true;
    }

    let expected: HashMap<String, String> = [("hello", "world"), ("catch", "all")]
        .iter()
        .map(|&(k, v)| (k.to_owned(), v.to_owned()))
        .collect();
    assert_eq!(
        Catchall::parse(r#"{"version":0,"hello":"world","catch":"all"}"#).unwrap(),
        Catchall(expected)
    )
}
/// An optional-version catch-all accepts a document with no "version"
/// key at all.
#[test]
fn catchall_v0_with_optional_version_number_absent() {
    #[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
    struct Catchall(HashMap<String, String>);
    impl SchemaV0 for Catchall {
        const VERSION_OPTIONAL: bool = true;
    }

    let expected: HashMap<String, String> = [("hello", "world"), ("catch", "all")]
        .iter()
        .map(|&(k, v)| (k.to_owned(), v.to_owned()))
        .collect();
    assert_eq!(
        Catchall::parse(r#"{"hello":"world","catch":"all"}"#).unwrap(),
        Catchall(expected)
    )
}
/// A document with no "version" key is treated as V0 and then migrated
/// all the way to V2 of the catch-all chain.
#[test]
fn unversioned_v0_migration() {
    #[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
    struct Catchall0(HashMap<String, String>);
    impl SchemaV0 for Catchall0 {
        const VERSION_OPTIONAL: bool = true;
    }

    #[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
    struct Catchall1 {
        count: usize,
        #[serde(flatten)]
        data: HashMap<String, String>,
    }
    impl Schema for Catchall1 {
        type Prev = Catchall0;
    }
    impl From<Catchall0> for Catchall1 {
        fn from(prev: Catchall0) -> Self {
            Self {
                count: 0,
                data: prev.0,
            }
        }
    }

    #[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
    struct Catchall2 {
        count: usize,
        name: String,
        #[serde(flatten)]
        data: HashMap<String, String>,
    }
    impl Schema for Catchall2 {
        type Prev = Catchall1;
    }
    impl From<Catchall1> for Catchall2 {
        fn from(prev: Catchall1) -> Self {
            Self {
                count: prev.count,
                name: String::new(),
                data: prev.data,
            }
        }
    }

    let expected: HashMap<String, String> = [("hello", "world"), ("catch", "all")]
        .iter()
        .map(|&(k, v)| (k.to_owned(), v.to_owned()))
        .collect();
    assert_eq!(
        Catchall2::parse(r#"{"hello":"world","catch":"all"}"#).unwrap(),
        Catchall2 {
            count: 0,
            name: String::new(),
            data: expected,
        },
    )
}