make cosmetic changes to state schema, add fileset passwords for forthcoming functionality

This commit is contained in:
xenofem 2022-08-16 06:16:00 -04:00
parent 073feda920
commit aef58d133b
9 changed files with 327 additions and 119 deletions

View file

@@ -1,7 +1,6 @@
mod download;
mod state;
mod store;
-mod timestamp;
mod upload;
mod zip;
@@ -310,7 +309,9 @@ async fn main() -> std::io::Result<()> {
let cachebuster: String = env_or_else("TRANSBEAM_CACHEBUSTER", String::new);
let state_file: PathBuf = match std::env::var("TRANSBEAM_STATE_FILE") {
-Ok(v) => v.parse().unwrap_or_else(|_| panic!("Invalid value {} for variable TRANSBEAM_STATE_FILE", v)),
+Ok(v) => v
+.parse()
+.unwrap_or_else(|_| panic!("Invalid value {} for variable TRANSBEAM_STATE_FILE", v)),
Err(_) => {
let legacy_state_file = storage_dir.join("files.json");
if legacy_state_file.is_file() {
@@ -322,7 +323,9 @@ async fn main() -> std::io::Result<()> {
};
let data = web::Data::new(AppData {
-state: StateDb::load(state_file).await.expect("Failed to load state file"),
+state: StateDb::load(state_file)
+.await
+.expect("Failed to load state file"),
config: Config {
base_url,
max_upload_size,

View file

@@ -1,12 +1,109 @@
use jsondb::JsonDb;
-mod v0 {
-pub type State = crate::store::StoredFiles;
+mod prelude {
+pub use std::collections::HashMap;
-impl jsondb::SchemaV0 for State {
-const VERSION_OPTIONAL: bool = true;
+pub use jsondb::Schema;
+pub use serde::{Deserialize, Serialize};
+pub use serde_with::serde_as;
+pub use serde_with::skip_serializing_none;
+pub use time::OffsetDateTime;
}
+mod v0;
pub mod v1 {
//! Schema version 1
//!
//! Changes:
//!
//! * Represent times in RFC3339 format instead of epoch seconds
//! * Ditch some pre-JsonDb ad-hoc migrations
//! * Stored files now have optional passwords for
//! deleting/extending them, stored in plaintext because the
//! threat model doesn't warrant anything stronger
use super::prelude::*;
#[serde_as]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct UploadedFile {
pub name: String,
pub size: u64,
#[serde_as(as = "time::format_description::well_known::Rfc3339")]
pub modtime: OffsetDateTime,
}
impl From<super::v0::UploadedFile> for UploadedFile {
fn from(old: super::v0::UploadedFile) -> Self {
UploadedFile {
name: old.name,
size: old.size,
modtime: old.modtime,
}
}
}
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct FileSet {
pub files: Vec<UploadedFile>,
// Optional for backwards compatibility only
pub directory_name: Option<String>,
}
impl From<super::v0::FileSet> for FileSet {
fn from(old: super::v0::FileSet) -> Self {
FileSet {
files: old.files.into_iter().map(UploadedFile::from).collect(),
directory_name: old.directory_name,
}
}
}
#[serde_as]
#[skip_serializing_none]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct StoredFile {
pub name: String,
pub size: u64,
#[serde_as(as = "time::format_description::well_known::Rfc3339")]
pub modtime: OffsetDateTime,
#[serde_as(as = "time::format_description::well_known::Rfc3339")]
pub expiry: OffsetDateTime,
pub contents: Option<FileSet>,
/// None password means the admin page can't be accessed
pub password: Option<String>,
}
impl From<super::v0::StoredFile> for StoredFile {
fn from(old: super::v0::StoredFile) -> Self {
StoredFile {
name: old.name,
size: old.size,
modtime: old.modtime,
expiry: old.expiry,
contents: old.contents.map(FileSet::from),
password: None,
}
}
}
#[derive(Debug, Default, Deserialize, Serialize)]
pub struct StoredFiles(pub HashMap<String, StoredFile>);
pub type State = StoredFiles;
impl Schema for State {
type Prev = super::v0::State;
}
impl From<super::v0::State> for State {
fn from(old: super::v0::State) -> Self {
StoredFiles(
old.0
.into_iter()
.map(|(k, v)| (k, StoredFile::from(v)))
.collect(),
)
}
}
}
-pub use v0::State;
+pub use v1::State;
pub type StateDb = JsonDb<State>;
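Not part of the diff — a minimal sketch of how the forthcoming functionality named in the commit message might use the new password field. The helper name and signature are hypothetical; only the field's semantics (plaintext, with None meaning the admin page can't be accessed) come from the schema above.

    // Hypothetical helper, not in this commit: gate the future admin page on
    // the v1 password field. `None` means the page can't be accessed at all;
    // otherwise compare against the stored plaintext value.
    fn admin_access_allowed(file: &crate::state::v1::StoredFile, supplied: &str) -> bool {
        match &file.password {
            Some(stored) => stored == supplied,
            None => false,
        }
    }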

src/state/v0.rs (new file, +92 lines)
View file

@@ -0,0 +1,92 @@
use super::prelude::*;
use serde_with::{FromInto, PickFirst};
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct UploadedFile {
pub name: String,
pub size: u64,
#[serde(with = "timestamp")]
pub modtime: OffsetDateTime,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct FileSet {
pub files: Vec<UploadedFile>,
// Optional for backwards compatibility only
pub directory_name: Option<String>,
}
impl From<Vec<UploadedFile>> for FileSet {
fn from(files: Vec<UploadedFile>) -> Self {
Self {
files,
directory_name: None,
}
}
}
#[serde_as]
#[skip_serializing_none]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct StoredFile {
pub name: String,
pub size: u64,
#[serde(with = "timestamp")]
pub modtime: OffsetDateTime,
#[serde(with = "timestamp")]
pub expiry: OffsetDateTime,
#[serde_as(as = "Option<PickFirst<(_, FromInto<Vec<UploadedFile>>)>>")]
#[serde(default)]
pub contents: Option<FileSet>,
}
#[derive(Debug, Default, Deserialize, Serialize)]
pub struct StoredFiles(pub HashMap<String, StoredFile>);
pub type State = StoredFiles;
impl jsondb::SchemaV0 for State {
const VERSION_OPTIONAL: bool = true;
}
mod timestamp {
use core::fmt;
use serde::{de::Visitor, Deserializer, Serializer};
use time::OffsetDateTime;
pub(crate) fn serialize<S: Serializer>(
time: &OffsetDateTime,
ser: S,
) -> Result<S::Ok, S::Error> {
ser.serialize_i64(time.unix_timestamp())
}
struct I64Visitor;
impl<'de> Visitor<'de> for I64Visitor {
type Value = i64;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "an integer")
}
fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E> {
Ok(v)
}
fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E> {
Ok(v as i64)
}
}
pub(crate) fn deserialize<'de, D: Deserializer<'de>>(
de: D,
) -> Result<OffsetDateTime, D::Error> {
Ok(
OffsetDateTime::from_unix_timestamp(de.deserialize_i64(I64Visitor)?)
.unwrap_or_else(|_| OffsetDateTime::now_utc()),
)
}
}
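Not part of the diff — a sketch contrasting the two timestamp representations, written as if it were a test inside src/state.rs (the v0 module is private, so its types are only reachable there). It assumes serde_json is available for the comparison; everything else comes from the v0 and v1 definitions above.

    // Illustration only: the same entry serialized under both schema versions.
    #[test]
    fn timestamp_representation_changes() {
        let modtime = time::OffsetDateTime::from_unix_timestamp(1_660_630_560).unwrap();
        let old = v0::UploadedFile { name: "a.txt".into(), size: 1, modtime };
        let new = v1::UploadedFile { name: "a.txt".into(), size: 1, modtime };
        // v0 writes modtime as epoch seconds, e.g. "modtime":1660630560
        // v1 writes an RFC 3339 string instead, e.g. "modtime":"2022-08-16T06:16:00Z"
        println!("{}", serde_json::to_string(&old).unwrap());
        println!("{}", serde_json::to_string(&new).unwrap());
    }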

View file

@@ -1,7 +1,7 @@
use std::{
collections::HashMap,
io::ErrorKind,
-path::{Path, PathBuf}, ops::DerefMut,
+ops::DerefMut,
+path::{Path, PathBuf},
};
use log::{debug, error, info};
@@ -9,15 +9,9 @@ use rand::{
distributions::{Alphanumeric, DistString},
thread_rng, Rng,
};
-use serde::{Deserialize, Serialize};
-use serde_with::skip_serializing_none;
-use serde_with::{serde_as, FromInto, PickFirst};
use time::OffsetDateTime;
use tokio::fs::File;
-use crate::upload::UploadedFile;
-use crate::zip::FileSet;
const MAX_STORAGE_FILES: usize = 1024;
pub fn gen_storage_code(use_mnemonic: bool) -> String {
@@ -34,23 +28,7 @@ pub fn is_valid_storage_code(s: &str) -> bool {
.all(|c| c.is_ascii_alphanumeric() || c == &b'-')
}
-#[serde_as]
-#[skip_serializing_none]
-#[derive(Debug, Clone, Deserialize, Serialize)]
-pub struct StoredFile {
-pub name: String,
-pub size: u64,
-#[serde(with = "crate::timestamp")]
-pub modtime: OffsetDateTime,
-#[serde(with = "crate::timestamp")]
-pub expiry: OffsetDateTime,
-#[serde_as(as = "Option<PickFirst<(_, FromInto<Vec<UploadedFile>>)>>")]
-#[serde(default)]
-pub contents: Option<FileSet>,
-}
-#[derive(Debug, Default, Deserialize, Serialize)]
-pub struct StoredFiles(pub HashMap<String, StoredFile>);
+pub use crate::state::v1::{StoredFile, StoredFiles};
async fn is_valid_entry(key: &str, info: &StoredFile, storage_dir: &Path) -> bool {
if info.expiry < OffsetDateTime::now_utc() {
@@ -130,11 +108,7 @@ impl crate::AppData {
/// Attempts to add a file to the store. Returns an I/O error if
/// something's broken, or a u64 of the maximum allowed file size
/// if the file was too big, or a unit if everything worked.
-pub async fn add_file(
-&self,
-key: String,
-file: StoredFile,
-) -> Result<(), FileAddError> {
+pub async fn add_file(&self, key: String, file: StoredFile) -> Result<(), FileAddError> {
let mut store = self.state.write().await;
if store.full(self.config.max_storage_size) {
return Err(FileAddError::Full);
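Not part of the diff — a rough caller-side sketch of the three outcomes the add_file docs describe, as it might look inside src/store.rs where FileAddError is in scope. Only the Full variant is visible in this diff, so the remaining error cases are matched generically rather than guessed at.

    // Hypothetical caller, mirroring how the uploader invokes add_file.
    async fn try_store(app_data: &crate::AppData, code: String, file: StoredFile) -> bool {
        match app_data.add_file(code, file).await {
            Ok(()) => true,                    // entry recorded in the state db
            Err(FileAddError::Full) => false,  // store is already at capacity
            Err(_other) => false,              // too-big or I/O failure; details carried by FileAddError
        }
    }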

View file

@@ -1,33 +0,0 @@
use core::fmt;
use serde::{de::Visitor, Deserializer, Serializer};
use time::OffsetDateTime;
pub(crate) fn serialize<S: Serializer>(time: &OffsetDateTime, ser: S) -> Result<S::Ok, S::Error> {
ser.serialize_i64(time.unix_timestamp())
}
struct I64Visitor;
impl<'de> Visitor<'de> for I64Visitor {
type Value = i64;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "an integer")
}
fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E> {
Ok(v)
}
fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E> {
Ok(v as i64)
}
}
pub(crate) fn deserialize<'de, D: Deserializer<'de>>(de: D) -> Result<OffsetDateTime, D::Error> {
Ok(
OffsetDateTime::from_unix_timestamp(de.deserialize_i64(I64Visitor)?)
.unwrap_or_else(|_| OffsetDateTime::now_utc()),
)
}

View file

@@ -109,13 +109,7 @@ impl Actor for Uploader {
type Context = <Uploader as Actor>::Context;
-#[derive(Debug, Clone, Deserialize, Serialize)]
-pub struct UploadedFile {
-pub name: String,
-pub size: u64,
-#[serde(with = "crate::timestamp")]
-pub modtime: OffsetDateTime,
-}
+pub use crate::state::v1::UploadedFile;
impl UploadedFile {
fn new(name: &str, size: u64, modtime: OffsetDateTime) -> Self {
@@ -328,15 +322,14 @@ impl Uploader {
modtime,
expiry: OffsetDateTime::now_utc() + lifetime * time::Duration::DAY,
contents,
+password: None,
};
let app_data = self.app_data.clone();
let storage_filename = self.storage_filename.clone();
ctx.spawn(
actix::fut::wrap_future(async move {
debug!("Spawned future to add entry {} to state", storage_filename);
-app_data
-.add_file(storage_filename, stored_file)
-.await
+app_data.add_file(storage_filename, stored_file).await
})
.map(|res, u: &mut Self, ctx: &mut Context| match res {
Ok(()) => ctx.text(
@@ -406,10 +399,7 @@ impl Uploader {
ctx.wait(
actix::fut::wrap_future(async move {
debug!("Spawned future to remove entry {} from state", filename);
-app_data
-.remove_file(&filename)
-.await
-.unwrap();
+app_data.remove_file(&filename).await.unwrap();
})
.map(stop_and_flush),
);

View file

@@ -2,7 +2,6 @@ use std::io::Write;
use crc32fast::Hasher;
use log::debug;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use crate::upload::UploadedFile;
@@ -28,21 +27,7 @@ const EOCD_TOTAL_SIZE: u64 = EOCD64_RECORD_SIZE + EOCD64_LOCATOR_SIZE + EOCD_REC
const EMPTY_STRING_CRC32: u32 = 0;
-#[derive(Debug, Clone, Deserialize, Serialize)]
-pub struct FileSet {
-pub files: Vec<UploadedFile>,
-// Optional for backwards compatibility only
-pub directory_name: Option<String>,
-}
-impl From<Vec<UploadedFile>> for FileSet {
-fn from(files: Vec<UploadedFile>) -> Self {
-Self {
-files,
-directory_name: None,
-}
-}
-}
+pub use crate::state::v1::FileSet;
fn full_file_name_len(file: &UploadedFile, directory_name: &Option<String>) -> u64 {
file.name.len() as u64