refactor config variables, add upload password

This commit is contained in:
xenofem 2022-05-03 16:28:43 -04:00
parent bfe7fcde99
commit eb53030043
13 changed files with 456 additions and 182 deletions

View file

@ -3,20 +3,48 @@ mod store;
mod upload;
mod zip;
use std::{fs::File, path::PathBuf};
use std::{fmt::Debug, fs::File, path::PathBuf, str::FromStr};
use actix_web::{
get, middleware::Logger, web, App, HttpRequest, HttpResponse, HttpServer, Responder,
get, middleware::Logger, post, web, App, HttpRequest, HttpResponse, HttpServer, Responder,
};
use actix_web_actors::ws;
use log::error;
use serde::Deserialize;
use bytesize::ByteSize;
use log::{error, warn};
use serde::{Deserialize, Serialize};
use store::FileStore;
use tokio::sync::RwLock;
const APP_NAME: &str = "transbeam";
type AppData = web::Data<RwLock<FileStore>>;
/// Shared application state handed to every actix-web handler via `web::Data`.
struct AppState {
    // Index of stored files; RwLock so concurrent downloads can read
    // while uploads take the write half.
    file_store: RwLock<FileStore>,
    // Immutable configuration resolved once at startup.
    config: Config,
}
/// Runtime configuration, read from `TRANSBEAM_*` environment variables at startup.
struct Config {
    // Largest single upload accepted, in bytes.
    max_upload_size: u64,
    // Longest allowed file lifetime, in days.
    max_lifetime: u16,
    // Password clients must present before uploading.
    upload_password: String,
    // Directory holding uploaded files and the metadata index.
    storage_dir: PathBuf,
    // When true, trust forwarded (realip) client addresses and bind to
    // localhost only; when false, use the raw peer address.
    reverse_proxy: bool,
    // Generate mnemonic word storage codes instead of alphanumeric ones.
    mnemonic_codes: bool,
}
/// Client IP address for logging purposes.
///
/// Behind a reverse proxy the raw peer address is the proxy itself, so
/// in that mode the forwarded ("realip") address is used instead.
///
/// Panics if the connection carries no address at all.
pub fn get_ip_addr(req: &HttpRequest, reverse_proxy: bool) -> String {
    let info = req.connection_info();
    let addr = if reverse_proxy {
        info.realip_remote_addr()
    } else {
        info.peer_addr()
    };
    addr.unwrap().to_owned()
}
/// Log a warning about a failed authentication attempt (wrong password
/// or invalid download code) from the given client address.
pub fn log_auth_failure(ip_addr: &str) {
    warn!("Incorrect authentication attempt from {}", ip_addr);
}
#[derive(Deserialize)]
struct DownloadRequest {
@ -27,16 +55,18 @@ struct DownloadRequest {
async fn handle_download(
req: HttpRequest,
download: web::Query<DownloadRequest>,
data: AppData,
data: web::Data<AppState>,
) -> actix_web::Result<HttpResponse> {
let ip_addr = get_ip_addr(&req, data.config.reverse_proxy);
let code = &download.code;
if !store::is_valid_storage_code(code) {
log_auth_failure(&ip_addr);
return Ok(HttpResponse::NotFound().finish());
}
let data = data.read().await;
let info = data.lookup_file(code);
let file_store = data.file_store.read().await;
let info = file_store.lookup_file(code);
if let Some(info) = info {
let storage_path = store::storage_dir().join(code);
let storage_path = data.config.storage_dir.join(code);
let file = File::open(&storage_path)?;
Ok(download::DownloadingFile {
file,
@ -45,50 +75,153 @@ async fn handle_download(
}
.into_response(&req))
} else {
log_auth_failure(&ip_addr);
Ok(HttpResponse::NotFound().finish())
}
}
#[get("/upload")]
async fn handle_upload(req: HttpRequest, stream: web::Payload, data: AppData) -> impl Responder {
ws::start(upload::Uploader::new(data), &req, stream)
async fn handle_upload(
req: HttpRequest,
stream: web::Payload,
data: web::Data<AppState>,
) -> impl Responder {
if data.file_store.read().await.full() {
return Ok(HttpResponse::BadRequest().finish());
}
let ip_addr = get_ip_addr(&req, data.config.reverse_proxy);
ws::start(upload::Uploader::new(data, ip_addr), &req, stream)
}
/// JSON request body for the upload password pre-check endpoint.
#[derive(Deserialize)]
struct UploadPasswordCheck {
    password: String,
}
/// POST /upload/check_password: verify the upload password without
/// starting an upload.  Responds 204 No Content on a match, 403
/// Forbidden (plus an auth-failure log entry) otherwise.
// NOTE(review): `==` on String is not a constant-time comparison —
// confirm this is acceptable for the threat model.
#[post("/upload/check_password")]
async fn check_upload_password(
    req: HttpRequest,
    body: web::Json<UploadPasswordCheck>,
    data: web::Data<AppState>,
) -> impl Responder {
    let ip_addr = get_ip_addr(&req, data.config.reverse_proxy);
    if body.password == data.config.upload_password {
        HttpResponse::NoContent().finish()
    } else {
        log_auth_failure(&ip_addr);
        HttpResponse::Forbidden().finish()
    }
}
/// JSON payload for /upload/limits.json, advertising current limits to clients.
#[derive(Serialize)]
struct UploadLimits {
    // Whether the store can accept any new uploads at all.
    open: bool,
    // Largest upload currently accepted, in bytes.
    max_size: u64,
    // Maximum requestable lifetime, in days.
    max_lifetime: u16,
}
/// GET /upload/limits.json: report whether uploads are open, the
/// effective maximum upload size (capped by remaining storage), and the
/// maximum lifetime in days.
#[get("/upload/limits.json")]
async fn upload_limits(data: web::Data<AppState>) -> impl Responder {
    let store = data.file_store.read().await;
    let limits = UploadLimits {
        open: !store.full(),
        // Effective cap is the smaller of remaining space and the
        // configured per-upload limit.
        max_size: data.config.max_upload_size.min(store.available_size()),
        max_lifetime: data.config.max_lifetime,
    };
    web::Json(limits)
}
/// Read `var` from the environment and parse it as `T`, falling back to
/// `default` when the variable is unset (or not valid Unicode).
///
/// Panics if the variable is set but cannot be parsed as `T`.
fn env_or<T: FromStr>(var: &str, default: T) -> T
where
    <T as FromStr>::Err: Debug,
{
    match std::env::var(var) {
        Err(_) => default,
        Ok(val) => val
            .parse()
            .unwrap_or_else(|_| panic!("Invalid value {} for variable {}", val, var)),
    }
}
/// Read `var` from the environment and parse it as `T`, calling
/// `default` to produce the fallback when the variable is unset (or not
/// valid Unicode).  The closure is only invoked on the fallback path.
///
/// Panics if the variable is set but cannot be parsed as `T`.
fn env_or_else<T: FromStr, F: FnOnce() -> T>(var: &str, default: F) -> T
where
    <T as FromStr>::Err: Debug,
{
    match std::env::var(var) {
        Err(_) => default(),
        Ok(val) => val
            .parse()
            .unwrap_or_else(|_| panic!("Invalid value {} for variable {}", val, var)),
    }
}
#[actix_web::main]
async fn main() -> std::io::Result<()> {
dotenv::dotenv().ok();
env_logger::init();
let data: AppData = web::Data::new(RwLock::new(FileStore::load().await?));
start_reaper(data.clone());
let static_dir: PathBuf = env_or_else("TRANSBEAM_STATIC_DIR", || PathBuf::from("static"));
let storage_dir: PathBuf = env_or_else("TRANSBEAM_STORAGE_DIR", || PathBuf::from("storage"));
let port: u16 = env_or("TRANSBEAM_PORT", 8080);
let mnemonic_codes: bool = env_or("TRANSBEAM_MNEMONIC_CODES", true);
let reverse_proxy: bool = env_or("TRANSBEAM_REVERSE_PROXY", false);
let max_lifetime: u16 = env_or("TRANSBEAM_MAX_LIFETIME", 30);
let max_upload_size: u64 =
env_or::<ByteSize>("TRANSBEAM_MAX_UPLOAD_SIZE", ByteSize(16 * bytesize::GB)).as_u64();
let max_storage_size: u64 =
env_or::<ByteSize>("TRANSBEAM_MAX_STORAGE_SIZE", ByteSize(64 * bytesize::GB)).as_u64();
let upload_password: String =
std::env::var("TRANSBEAM_UPLOAD_PASSWORD").expect("TRANSBEAM_UPLOAD_PASSWORD must be set!");
let static_dir = PathBuf::from(
std::env::var("TRANSBEAM_STATIC_DIR").unwrap_or_else(|_| String::from("static")),
);
let port = std::env::var("TRANSBEAM_PORT")
.ok()
.and_then(|p| p.parse::<u16>().ok())
.unwrap_or(8080);
let data = web::Data::new(AppState {
file_store: RwLock::new(FileStore::load(storage_dir.clone(), max_storage_size).await?),
config: Config {
max_upload_size,
max_lifetime,
upload_password,
storage_dir,
reverse_proxy,
mnemonic_codes,
},
});
start_reaper(data.clone());
HttpServer::new(move || {
App::new()
.app_data(data.clone())
.wrap(Logger::default())
.service(handle_upload)
.wrap(if data.config.reverse_proxy {
Logger::new(r#"%{r}a "%r" %s %b "%{Referer}i" "%{User-Agent}i" %T"#)
} else {
Logger::default()
})
.service(handle_download)
.service(handle_upload)
.service(check_upload_password)
.service(upload_limits)
.service(actix_files::Files::new("/", static_dir.clone()).index_file("index.html"))
})
.bind(("127.0.0.1", port))?
.bind((
if reverse_proxy {
"127.0.0.1"
} else {
"0.0.0.0"
},
port,
))?
.run()
.await?;
Ok(())
}
fn start_reaper(data: AppData) {
fn start_reaper(data: web::Data<AppState>) {
std::thread::spawn(move || {
actix_web::rt::System::new().block_on(async {
loop {
actix_web::rt::time::sleep(core::time::Duration::from_secs(86400)).await;
if let Err(e) = data.write().await.remove_expired_files().await {
if let Err(e) = data.file_store.write().await.remove_expired_files().await {
error!("Error reaping expired files: {}", e);
}
}

View file

@ -1,6 +1,10 @@
use std::{collections::HashMap, io::ErrorKind, path::PathBuf, str::FromStr};
use std::{
collections::HashMap,
io::ErrorKind,
path::{Path, PathBuf},
};
use log::{debug, error, info, warn};
use log::{debug, error, info};
use rand::{
distributions::{Alphanumeric, DistString},
thread_rng, Rng,
@ -13,17 +17,13 @@ use tokio::{
};
const STATE_FILE_NAME: &str = "files.json";
const DEFAULT_STORAGE_DIR: &str = "storage";
const DEFAULT_MAX_LIFETIME: u32 = 30;
const GIGA: u64 = 1024 * 1024 * 1024;
const DEFAULT_MAX_UPLOAD_SIZE: u64 = 16 * GIGA;
const DEFAULT_MAX_STORAGE_SIZE: u64 = 64 * GIGA;
const MAX_STORAGE_FILES: usize = 1024;
pub fn gen_storage_code() -> String {
if std::env::var("TRANSBEAM_MNEMONIC_CODES").as_deref() == Ok("false") {
Alphanumeric.sample_string(&mut thread_rng(), 8)
} else {
pub fn gen_storage_code(use_mnemonic: bool) -> String {
if use_mnemonic {
mnemonic::to_string(thread_rng().gen::<[u8; 4]>())
} else {
Alphanumeric.sample_string(&mut thread_rng(), 8)
}
}
@ -33,32 +33,6 @@ pub fn is_valid_storage_code(s: &str) -> bool {
.all(|c| c.is_ascii_alphanumeric() || c == &b'-')
}
/// Storage directory, from TRANSBEAM_STORAGE_DIR or the built-in default.
pub(crate) fn storage_dir() -> PathBuf {
    let dir = std::env::var("TRANSBEAM_STORAGE_DIR")
        .unwrap_or_else(|_| String::from(DEFAULT_STORAGE_DIR));
    PathBuf::from(dir)
}
/// Read `var` from the environment and parse it as `T`, silently
/// falling back to `default` when the variable is unset or unparsable.
fn parse_env_var<T: FromStr>(var: &str, default: T) -> T {
    match std::env::var(var) {
        Ok(val) => val.parse().unwrap_or(default),
        Err(_) => default,
    }
}
/// Maximum file lifetime in days (TRANSBEAM_MAX_LIFETIME, default 30).
pub(crate) fn max_lifetime() -> u32 {
    parse_env_var("TRANSBEAM_MAX_LIFETIME", DEFAULT_MAX_LIFETIME)
}
/// Maximum size in bytes of one upload (TRANSBEAM_MAX_UPLOAD_SIZE, default 16 GiB).
pub(crate) fn max_single_size() -> u64 {
    parse_env_var("TRANSBEAM_MAX_UPLOAD_SIZE", DEFAULT_MAX_UPLOAD_SIZE)
}
/// Maximum total bytes across all stored files (TRANSBEAM_MAX_STORAGE_SIZE, default 64 GiB).
pub(crate) fn max_total_size() -> u64 {
    parse_env_var("TRANSBEAM_MAX_STORAGE_SIZE", DEFAULT_MAX_STORAGE_SIZE)
}
#[derive(Clone, Deserialize, Serialize)]
pub struct StoredFile {
pub name: String,
@ -110,13 +84,13 @@ pub(crate) mod timestamp {
}
}
async fn is_valid_entry(key: &str, info: &StoredFile) -> bool {
async fn is_valid_entry(key: &str, info: &StoredFile, storage_dir: &Path) -> bool {
if info.expiry < OffsetDateTime::now_utc() {
info!("File {} has expired", key);
return false;
}
let file = if let Ok(f) = File::open(storage_dir().join(&key)).await {
let file = if let Ok(f) = File::open(storage_dir.join(&key)).await {
f
} else {
error!(
@ -141,10 +115,35 @@ async fn is_valid_entry(key: &str, info: &StoredFile) -> bool {
true
}
pub(crate) struct FileStore(HashMap<String, StoredFile>);
async fn delete_file_if_exists(file: &PathBuf) -> std::io::Result<()> {
if let Err(e) = tokio::fs::remove_file(file).await {
if e.kind() != ErrorKind::NotFound {
error!("Failed to delete file {}: {}", file.to_string_lossy(), e);
return Err(e);
}
}
Ok(())
}
/// Reasons the file store can reject or fail to persist a new file.
#[derive(thiserror::Error, Debug)]
pub enum FileAddError {
    /// Persisting the metadata index to disk failed.
    #[error("Failed to write metadata to filesystem")]
    FileSystem(#[from] std::io::Error),
    /// The file exceeds remaining capacity; payload is the number of
    /// bytes still available.
    #[error("File was too large, available space is {0} bytes")]
    TooBig(u64),
    /// The store is at capacity and accepts no new files at all.
    #[error("File store is full")]
    Full,
}
/// On-disk file store: an in-memory index of stored files plus the
/// backing directory and its size budget.
pub struct FileStore {
    // Map from storage code to the file's metadata.
    files: HashMap<String, StoredFile>,
    // Directory holding the stored files and the JSON state file.
    storage_dir: PathBuf,
    // Total bytes the store may hold across all files.
    max_storage_size: u64,
}
impl FileStore {
pub(crate) async fn load() -> std::io::Result<Self> {
let open_result = File::open(storage_dir().join(STATE_FILE_NAME)).await;
pub(crate) async fn load(storage_dir: PathBuf, max_storage_size: u64) -> std::io::Result<Self> {
let open_result = File::open(storage_dir.join(STATE_FILE_NAME)).await;
match open_result {
Ok(mut f) => {
let mut buf = String::new();
@ -160,22 +159,28 @@ impl FileStore {
error!("Invalid key in persistent storage: {}", key);
continue;
}
if is_valid_entry(&key, &info).await {
if is_valid_entry(&key, &info, &storage_dir).await {
filtered.insert(key, info);
} else {
info!("Deleting file {}", key);
if let Err(e) = tokio::fs::remove_file(storage_dir().join(&key)).await {
warn!("Failed to delete file {}: {}", key, e);
}
delete_file_if_exists(&storage_dir.join(&key)).await?;
}
}
let mut loaded = Self(filtered);
let mut loaded = Self {
files: filtered,
storage_dir,
max_storage_size,
};
loaded.save().await?;
Ok(loaded)
}
Err(e) => {
if let ErrorKind::NotFound = e.kind() {
Ok(Self(HashMap::new()))
Ok(Self {
files: HashMap::new(),
storage_dir,
max_storage_size,
})
} else {
Err(e)
}
@ -184,17 +189,25 @@ impl FileStore {
}
fn total_size(&self) -> u64 {
self.0.iter().fold(0, |acc, (_, f)| acc + f.size)
self.files.iter().fold(0, |acc, (_, f)| acc + f.size)
}
/// Bytes still available before `max_storage_size` is reached.
pub fn available_size(&self) -> u64 {
    // Saturating: never underflows even if the store somehow exceeds its budget.
    self.max_storage_size.saturating_sub(self.total_size())
}
async fn save(&mut self) -> std::io::Result<()> {
info!("saving updated state: {} entries", self.0.len());
File::create(storage_dir().join(STATE_FILE_NAME))
info!("saving updated state: {} entries", self.files.len());
File::create(self.storage_dir.join(STATE_FILE_NAME))
.await?
.write_all(&serde_json::to_vec_pretty(&self.0)?)
.write_all(&serde_json::to_vec_pretty(&self.files)?)
.await
}
/// True when no further uploads can be accepted: the byte budget is
/// exhausted or the file-count cap is reached.
pub fn full(&self) -> bool {
    self.available_size() == 0 || self.files.len() >= MAX_STORAGE_FILES
}
/// Attempts to add a file to the store. Returns an I/O error if
/// something's broken, or a u64 of the maximum allowed file size
/// if the file was too big, or a unit if everything worked.
@ -202,37 +215,42 @@ impl FileStore {
&mut self,
key: String,
file: StoredFile,
) -> std::io::Result<Result<(), u64>> {
let remaining_size = max_total_size().saturating_sub(self.total_size());
let allowed_size = std::cmp::min(remaining_size, max_single_size());
if file.size > allowed_size {
return Ok(Err(allowed_size));
) -> Result<(), FileAddError> {
if self.full() {
return Err(FileAddError::Full);
}
self.0.insert(key, file);
self.save().await.map(Ok)
let available_size = self.available_size();
if file.size > available_size {
return Err(FileAddError::TooBig(available_size));
}
self.files.insert(key, file);
self.save().await?;
Ok(())
}
pub(crate) fn lookup_file(&self, key: &str) -> Option<StoredFile> {
self.0.get(key).cloned()
self.files.get(key).cloned()
}
pub(crate) async fn remove_file(&mut self, key: &str) -> std::io::Result<()> {
debug!("removing entry {} from state", key);
self.0.remove(key);
self.save().await
self.files.remove(key);
self.save().await?;
if is_valid_storage_code(key) {
delete_file_if_exists(&self.storage_dir.join(key)).await?;
}
Ok(())
}
pub(crate) async fn remove_expired_files(&mut self) -> std::io::Result<()> {
info!("Checking for expired files");
let now = OffsetDateTime::now_utc();
for (key, file) in std::mem::take(&mut self.0).into_iter() {
for (key, file) in std::mem::take(&mut self.files).into_iter() {
if file.expiry > now {
self.0.insert(key, file);
self.files.insert(key, file);
} else {
info!("Deleting expired file {}", key);
if let Err(e) = tokio::fs::remove_file(storage_dir().join(&key)).await {
warn!("Failed to delete expired file {}: {}", key, e);
}
delete_file_if_exists(&self.storage_dir.join(&key)).await?;
}
}
self.save().await

View file

@ -2,6 +2,7 @@ use std::{collections::HashSet, fs::File, io::Write};
use actix::{fut::future::ActorFutureExt, Actor, ActorContext, AsyncContext, StreamHandler};
use actix_http::ws::{CloseReason, Item};
use actix_web::web;
use actix_web_actors::ws::{self, CloseCode};
use bytes::Bytes;
use log::{debug, error, info, trace};
@ -9,7 +10,11 @@ use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use unicode_normalization::UnicodeNormalization;
use crate::store::{self, storage_dir, StoredFile};
use crate::{
log_auth_failure,
store::{self, FileAddError, StoredFile},
AppState,
};
const MAX_FILES: usize = 256;
const FILENAME_DATE_FORMAT: &[time::format_description::FormatItem] =
@ -29,10 +34,14 @@ enum Error {
NoFiles,
#[error("Number of files submitted by client exceeded the maximum limit")]
TooManyFiles,
#[error("Requested lifetime was too long")]
TooLong,
#[error("Requested lifetime was too long, can be at most {0} days")]
TooLong(u16),
#[error("Upload size was too large, can be at most {0} bytes")]
TooBig(u64),
#[error("File storage is full")]
Full,
#[error("Incorrect password")]
IncorrectPassword,
#[error("Websocket was closed by client before completing transfer")]
ClosedEarly(Option<CloseReason>),
#[error("Client sent more data than they were supposed to")]
@ -50,8 +59,10 @@ impl Error {
Self::DuplicateFilename
| Self::NoFiles
| Self::TooManyFiles
| Self::TooLong
| Self::TooBig(_) => CloseCode::Policy,
| Self::TooLong(_)
| Self::TooBig(_)
| Self::Full
| Self::IncorrectPassword => CloseCode::Policy,
}
}
}
@ -59,17 +70,19 @@ impl Error {
pub struct Uploader {
writer: Option<Box<dyn Write>>,
storage_filename: String,
app_data: super::AppData,
app_state: web::Data<AppState>,
bytes_remaining: u64,
ip_addr: String,
}
impl Uploader {
pub(crate) fn new(app_data: super::AppData) -> Self {
pub(crate) fn new(app_state: web::Data<AppState>, ip_addr: String) -> Self {
Self {
writer: None,
storage_filename: store::gen_storage_code(),
app_data,
storage_filename: store::gen_storage_code(app_state.config.mnemonic_codes),
app_state,
bytes_remaining: 0,
ip_addr,
}
}
}
@ -117,7 +130,8 @@ impl RawUploadedFile {
#[derive(Deserialize)]
struct UploadManifest {
files: Vec<RawUploadedFile>,
lifetime: u32,
lifetime: u16,
password: String,
}
#[derive(Serialize)]
@ -125,7 +139,8 @@ struct UploadManifest {
enum ServerMessage {
Ready { code: String },
TooBig { max_size: u64 },
TooLong { max_days: u32 },
TooLong { max_days: u16 },
IncorrectPassword,
Error { details: String },
}
@ -135,9 +150,10 @@ impl From<&Error> for ServerMessage {
Error::TooBig(max_size) => ServerMessage::TooBig {
max_size: *max_size,
},
Error::TooLong => ServerMessage::TooLong {
max_days: store::max_lifetime(),
Error::TooLong(max_days) => ServerMessage::TooLong {
max_days: *max_days,
},
Error::IncorrectPassword => ServerMessage::IncorrectPassword,
_ => ServerMessage::Error {
details: e.to_string(),
},
@ -189,6 +205,9 @@ fn ack(ctx: &mut Context) {
impl Uploader {
fn notify_error_and_cleanup(&mut self, e: Error, ctx: &mut Context) {
error!("{}", e);
if let Error::IncorrectPassword = e {
log_auth_failure(&self.ip_addr);
}
ctx.text(serde_json::to_string(&ServerMessage::from(&e)).unwrap());
ctx.close(Some(ws::CloseReason {
code: e.close_code(),
@ -207,9 +226,13 @@ impl Uploader {
let UploadManifest {
files: raw_files,
lifetime,
password,
} = serde_json::from_slice(text.as_bytes())?;
if lifetime > store::max_lifetime() {
return Err(Error::TooLong);
if std::env::var("TRANSBEAM_UPLOAD_PASSWORD") != Ok(password) {
return Err(Error::IncorrectPassword);
}
if lifetime > self.app_state.config.max_lifetime {
return Err(Error::TooLong(self.app_state.config.max_lifetime));
}
info!("Received file list: {} files", raw_files.len());
debug!("{:?}", raw_files);
@ -234,7 +257,14 @@ impl Uploader {
self.bytes_remaining += file.size;
files.push(file);
}
let storage_path = storage_dir().join(self.storage_filename.clone());
if self.bytes_remaining > self.app_state.config.max_upload_size {
return Err(Error::TooBig(self.app_state.config.max_upload_size));
}
let storage_path = self
.app_state
.config
.storage_dir
.join(self.storage_filename.clone());
info!("storing to: {:?}", storage_path);
let writer = File::options()
.write(true)
@ -264,25 +294,32 @@ impl Uploader {
modtime,
expiry: OffsetDateTime::now_utc() + lifetime * time::Duration::DAY,
};
let data = self.app_data.clone();
let state = self.app_state.clone();
let storage_filename = self.storage_filename.clone();
ctx.spawn(
actix::fut::wrap_future(async move {
debug!("Spawned future to add entry {} to state", storage_filename);
data.write()
state
.file_store
.write()
.await
.add_file(storage_filename, stored_file)
.await
})
.map(|res, u: &mut Self, ctx: &mut Context| match res {
Ok(Ok(())) => ctx.text(
Ok(()) => ctx.text(
serde_json::to_string(&ServerMessage::Ready {
code: u.storage_filename.clone(),
})
.unwrap(),
),
Ok(Err(size)) => u.notify_error_and_cleanup(Error::TooBig(size), ctx),
Err(e) => u.notify_error_and_cleanup(Error::from(e), ctx),
Err(FileAddError::TooBig(size)) => {
u.notify_error_and_cleanup(Error::TooBig(size), ctx)
}
Err(FileAddError::Full) => u.notify_error_and_cleanup(Error::Full, ctx),
Err(FileAddError::FileSystem(e)) => {
u.notify_error_and_cleanup(Error::from(e), ctx)
}
}),
);
}
@ -332,17 +369,20 @@ impl Uploader {
"Cleaning up after failed upload of {}",
self.storage_filename
);
let data = self.app_data.clone();
let state = self.app_state.clone();
let filename = self.storage_filename.clone();
ctx.wait(
actix::fut::wrap_future(async move {
debug!("Spawned future to remove entry {} from state", filename);
data.write().await.remove_file(&filename).await.unwrap();
state
.file_store
.write()
.await
.remove_file(&filename)
.await
.unwrap();
})
.map(stop_and_flush),
);
if let Err(e) = std::fs::remove_file(storage_dir().join(&self.storage_filename)) {
error!("Failed to remove file {}: {}", self.storage_filename, e);
}
}
}