cargo clippy and fmt

xenofem 2022-05-01 05:28:50 -04:00
parent 8275b940ac
commit bfe7fcde99
3 changed files with 71 additions and 50 deletions

View file

@@ -61,8 +61,9 @@ async fn main() -> std::io::Result<()> {
     let data: AppData = web::Data::new(RwLock::new(FileStore::load().await?));
     start_reaper(data.clone());

-    let static_dir =
-        PathBuf::from(std::env::var("TRANSBEAM_STATIC_DIR").unwrap_or_else(|_| String::from("static")));
+    let static_dir = PathBuf::from(
+        std::env::var("TRANSBEAM_STATIC_DIR").unwrap_or_else(|_| String::from("static")),
+    );
     let port = std::env::var("TRANSBEAM_PORT")
         .ok()
         .and_then(|p| p.parse::<u16>().ok())

View file

@@ -1,7 +1,10 @@
 use std::{collections::HashMap, io::ErrorKind, path::PathBuf, str::FromStr};

 use log::{debug, error, info, warn};
-use rand::{distributions::{Alphanumeric, DistString}, thread_rng, Rng};
+use rand::{
+    distributions::{Alphanumeric, DistString},
+    thread_rng, Rng,
+};
 use serde::{Deserialize, Serialize};
 use time::OffsetDateTime;
 use tokio::{
@@ -12,9 +15,9 @@ use tokio::{
 const STATE_FILE_NAME: &str = "files.json";
 const DEFAULT_STORAGE_DIR: &str = "storage";
 const DEFAULT_MAX_LIFETIME: u32 = 30;
-const GIGA: u64 = 1024*1024*1024;
-const DEFAULT_MAX_UPLOAD_SIZE: u64 = 16*GIGA;
-const DEFAULT_MAX_STORAGE_SIZE: u64 = 64*GIGA;
+const GIGA: u64 = 1024 * 1024 * 1024;
+const DEFAULT_MAX_UPLOAD_SIZE: u64 = 16 * GIGA;
+const DEFAULT_MAX_STORAGE_SIZE: u64 = 64 * GIGA;

 pub fn gen_storage_code() -> String {
     if std::env::var("TRANSBEAM_MNEMONIC_CODES").as_deref() == Ok("false") {
@@ -25,15 +28,23 @@ pub fn gen_storage_code() -> String {
 }

 pub fn is_valid_storage_code(s: &str) -> bool {
-    s.as_bytes().iter().all(|c| c.is_ascii_alphanumeric() || c == &b'-')
+    s.as_bytes()
+        .iter()
+        .all(|c| c.is_ascii_alphanumeric() || c == &b'-')
 }

 pub(crate) fn storage_dir() -> PathBuf {
-    PathBuf::from(std::env::var("TRANSBEAM_STORAGE_DIR").unwrap_or_else(|_| String::from(DEFAULT_STORAGE_DIR)))
+    PathBuf::from(
+        std::env::var("TRANSBEAM_STORAGE_DIR")
+            .unwrap_or_else(|_| String::from(DEFAULT_STORAGE_DIR)),
+    )
 }

 fn parse_env_var<T: FromStr>(var: &str, default: T) -> T {
-    std::env::var(var).ok().and_then(|val| val.parse::<T>().ok()).unwrap_or(default)
+    std::env::var(var)
+        .ok()
+        .and_then(|val| val.parse::<T>().ok())
+        .unwrap_or(default)
 }

 pub(crate) fn max_lifetime() -> u32 {
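
Side note: the reformatted `parse_env_var` helper keeps its original behaviour, falling back to the supplied default whenever the variable is unset or fails to parse. A minimal standalone sketch of that behaviour (the `TRANSBEAM_EXAMPLE` variable name is only an illustration, not something transbeam defines):

    use std::str::FromStr;

    fn parse_env_var<T: FromStr>(var: &str, default: T) -> T {
        // Unset or unparsable values silently fall back to the default.
        std::env::var(var)
            .ok()
            .and_then(|val| val.parse::<T>().ok())
            .unwrap_or(default)
    }

    fn main() {
        // With TRANSBEAM_EXAMPLE unset, this prints the default of 30.
        println!("{}", parse_env_var("TRANSBEAM_EXAMPLE", 30u32));
    }
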
@@ -194,7 +205,9 @@ impl FileStore {
     ) -> std::io::Result<Result<(), u64>> {
         let remaining_size = max_total_size().saturating_sub(self.total_size());
         let allowed_size = std::cmp::min(remaining_size, max_single_size());
-        if file.size > allowed_size { return Ok(Err(allowed_size)); }
+        if file.size > allowed_size {
+            return Ok(Err(allowed_size));
+        }
         self.0.insert(key, file);
         self.save().await.map(Ok)
     }
@@ -212,7 +225,7 @@ impl FileStore {
     pub(crate) async fn remove_expired_files(&mut self) -> std::io::Result<()> {
         info!("Checking for expired files");
         let now = OffsetDateTime::now_utc();
-        for (key, file) in std::mem::replace(&mut self.0, HashMap::new()).into_iter() {
+        for (key, file) in std::mem::take(&mut self.0).into_iter() {
             if file.expiry > now {
                 self.0.insert(key, file);
             } else {
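
The `std::mem::take` substitution is the clippy-driven change here (the rest of this file is rustfmt whitespace): clippy's `mem_replace_with_default` lint suggests `std::mem::take` in place of `std::mem::replace(&mut x, Default::default())`. A minimal sketch of the equivalence, using a plain `HashMap` rather than transbeam's file map:

    use std::collections::HashMap;

    fn main() {
        let mut a: HashMap<&str, u32> = HashMap::from([("x", 1)]);
        let mut b = a.clone();

        // Both calls hand back the old map and leave an empty one behind.
        let old_a = std::mem::replace(&mut a, HashMap::new());
        let old_b = std::mem::take(&mut b);

        assert!(a.is_empty() && b.is_empty());
        assert_eq!(old_a, old_b);
    }
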

View file

@@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize};
 use time::OffsetDateTime;
 use unicode_normalization::UnicodeNormalization;

-use crate::store::{storage_dir, StoredFile, self};
+use crate::store::{self, storage_dir, StoredFile};

 const MAX_FILES: usize = 256;
 const FILENAME_DATE_FORMAT: &[time::format_description::FormatItem] =
@@ -132,9 +132,15 @@ enum ServerMessage {
 impl From<&Error> for ServerMessage {
     fn from(e: &Error) -> Self {
         match e {
-            Error::TooBig(max_size) => ServerMessage::TooBig { max_size: *max_size },
-            Error::TooLong => ServerMessage::TooLong { max_days: store::max_lifetime() },
-            _ => ServerMessage::Error { details: e.to_string() },
+            Error::TooBig(max_size) => ServerMessage::TooBig {
+                max_size: *max_size,
+            },
+            Error::TooLong => ServerMessage::TooLong {
+                max_days: store::max_lifetime(),
+            },
+            _ => ServerMessage::Error {
+                details: e.to_string(),
+            },
         }
     }
 }
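
For context, this `From<&Error>` impl is what allows building an outgoing message directly from an error value via `ServerMessage::from(&e)` (or `.into()`); only its layout changes here. A self-contained sketch of the same pattern with simplified stand-in enums (the real ones carry more variants and are serialized with serde):

    #[derive(Debug)]
    enum Error {
        TooBig(u64),
        TooLong,
    }

    #[derive(Debug)]
    enum ServerMessage {
        TooBig { max_size: u64 },
        TooLong { max_days: u32 },
    }

    impl From<&Error> for ServerMessage {
        fn from(e: &Error) -> Self {
            match e {
                Error::TooBig(max_size) => ServerMessage::TooBig {
                    max_size: *max_size,
                },
                Error::TooLong => ServerMessage::TooLong { max_days: 30 },
            }
        }
    }

    fn main() {
        let msg = ServerMessage::from(&Error::TooBig(1024));
        println!("{:?}", msg);
    }
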
@@ -191,18 +197,17 @@ impl Uploader {
         self.cleanup_after_error(ctx);
     }

-    fn handle_message(
-        &mut self,
-        msg: ws::Message,
-        ctx: &mut Context,
-    ) -> Result<bool, Error> {
+    fn handle_message(&mut self, msg: ws::Message, ctx: &mut Context) -> Result<bool, Error> {
         trace!("Websocket message: {:?}", msg);
         match msg {
             ws::Message::Text(text) => {
                 if self.writer.is_some() {
                     return Err(Error::UnexpectedMessageType);
                 }
-                let UploadManifest { files: raw_files, lifetime, } = serde_json::from_slice(text.as_bytes())?;
+                let UploadManifest {
+                    files: raw_files,
+                    lifetime,
+                } = serde_json::from_slice(text.as_bytes())?;
                 if lifetime > store::max_lifetime() {
                     return Err(Error::TooLong);
                 }
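
Destructuring `UploadManifest` straight out of `serde_json::from_slice` is unchanged by the reformat; only the pattern now spans several lines. A runnable sketch of that deserialization pattern, with a simplified stand-in manifest (the real `files` field carries richer per-file metadata; this assumes serde with the derive feature plus serde_json, which the project already depends on):

    use serde::Deserialize;

    #[derive(Deserialize)]
    struct UploadManifest {
        files: Vec<String>,
        lifetime: u32,
    }

    fn main() -> Result<(), serde_json::Error> {
        let text = br#"{"files": ["a.txt"], "lifetime": 7}"#;
        // Destructure the fields directly out of the deserialized value.
        let UploadManifest { files, lifetime } = serde_json::from_slice(text)?;
        println!("{} file(s) for {} day(s)", files.len(), lifetime);
        Ok(())
    }
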
@@ -235,19 +240,15 @@ impl Uploader {
                     .write(true)
                     .create_new(true)
                     .open(&storage_path)?;
-                let (writer, name, size, modtime): (Box<dyn Write>,_,_,_) = if files.len() > 1 {
+                let (writer, name, size, modtime): (Box<dyn Write>, _, _, _) = if files.len() > 1 {
                     info!("Wrapping in zipfile generator");
                     let now = OffsetDateTime::now_utc();
                     let zip_writer = super::zip::ZipGenerator::new(files, writer);
                     let size = zip_writer.total_size();
-                    let download_filename =
-                        super::APP_NAME.to_owned() + &now.format(FILENAME_DATE_FORMAT).unwrap() + ".zip";
-                    (
-                        Box::new(zip_writer),
-                        download_filename,
-                        size,
-                        now,
-                    )
+                    let download_filename = super::APP_NAME.to_owned()
+                        + &now.format(FILENAME_DATE_FORMAT).unwrap()
+                        + ".zip";
+                    (Box::new(zip_writer), download_filename, size, now)
                 } else {
                     (
                         Box::new(writer),
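
The tuple's `Box<dyn Write>` element is what lets the single-file and zip-wrapped code paths share one variable; rustfmt only tightens its spacing here. A small sketch of the same idea, choosing a concrete writer at runtime behind one trait object (standard-library types only, for illustration):

    use std::io::{self, Write};

    fn main() -> io::Result<()> {
        let wrap = true;
        // Pick a concrete writer at runtime behind a single trait object,
        // mirroring the (Box<dyn Write>, _, _, _) tuple above.
        let mut writer: Box<dyn Write> = if wrap {
            Box::new(io::BufWriter::new(io::stdout()))
        } else {
            Box::new(io::stdout())
        };
        writer.write_all(b"hello\n")?;
        writer.flush()
    }
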
@@ -261,23 +262,29 @@ impl Uploader {
                     name,
                     size,
                     modtime,
-                    expiry: OffsetDateTime::now_utc() + lifetime*time::Duration::DAY,
+                    expiry: OffsetDateTime::now_utc() + lifetime * time::Duration::DAY,
                 };
                 let data = self.app_data.clone();
                 let storage_filename = self.storage_filename.clone();
-                ctx.spawn(actix::fut::wrap_future(async move {
-                    debug!("Spawned future to add entry {} to state", storage_filename);
-                    data.write()
-                        .await
-                        .add_file(storage_filename, stored_file)
-                        .await
-                }).map(|res, u: &mut Self, ctx: &mut Context| {
-                    match res {
-                        Ok(Ok(())) => ctx.text(serde_json::to_string(&ServerMessage::Ready { code: u.storage_filename.clone() }).unwrap()),
-                        Ok(Err(size)) => u.notify_error_and_cleanup(Error::TooBig(size), ctx),
-                        Err(e) => u.notify_error_and_cleanup(Error::from(e), ctx)
-                    }
-                }));
+                ctx.spawn(
+                    actix::fut::wrap_future(async move {
+                        debug!("Spawned future to add entry {} to state", storage_filename);
+                        data.write()
+                            .await
+                            .add_file(storage_filename, stored_file)
+                            .await
+                    })
+                    .map(|res, u: &mut Self, ctx: &mut Context| match res {
+                        Ok(Ok(())) => ctx.text(
+                            serde_json::to_string(&ServerMessage::Ready {
+                                code: u.storage_filename.clone(),
+                            })
+                            .unwrap(),
+                        ),
+                        Ok(Err(size)) => u.notify_error_and_cleanup(Error::TooBig(size), ctx),
+                        Err(e) => u.notify_error_and_cleanup(Error::from(e), ctx),
+                    }),
+                );
             }
             ws::Message::Binary(data) | ws::Message::Continuation(Item::Last(data)) => {
                 let result = self.handle_data(data)?;