mod download;
mod state;
mod store;
mod upload;
mod zip;

use std::{fmt::Debug, ops::Deref, path::PathBuf, str::FromStr};

use actix_http::StatusCode;
use actix_session::{storage::CookieSessionStore, Session, SessionMiddleware};
use actix_web::{
    cookie, error::InternalError, get, middleware::Logger, post, web, App, HttpRequest,
    HttpResponse, HttpServer, Responder,
};
use actix_web_actors::ws;
use argon2::{Argon2, PasswordVerifier};
use askama_actix::{Template, TemplateToResponse};
use bytesize::ByteSize;
use log::{error, warn};
use password_hash::PasswordHashString;
use serde::{Deserialize, Serialize};
use state::{prelude::SizedFile, StateDb};
use store::{StoredFile, StoredFiles};
use tokio::fs::File;

const APP_NAME: &str = "transbeam";
const DATE_DISPLAY_FORMAT: &[time::format_description::FormatItem<'static>] =
    time::macros::format_description!("[year]-[month]-[day]");

/// Shared application state handed to every handler via `web::Data`.
struct AppData {
    state: state::StateDb,
    config: Config,
}

/// Runtime configuration, populated from `TRANSBEAM_*` environment variables in `main`.
struct Config {
    base_url: String,
    max_storage_size: u64,
    min_disk_free: u64,
    max_upload_size: u64,
    max_lifetime: u16,
    upload_password: String,
    storage_dir: PathBuf,
    reverse_proxy: bool,
    mnemonic_codes: bool,
    cachebuster: String,
    admin_password_hash: PasswordHashString,
}

/// Returns the client's address, trusting reverse-proxy headers only when
/// `reverse_proxy` is enabled; otherwise the socket's peer address is used.
pub fn get_ip_addr(req: &HttpRequest, reverse_proxy: bool) -> String {
    let conn_info = req.connection_info();
    if reverse_proxy {
        conn_info.realip_remote_addr()
    } else {
        conn_info.peer_addr()
    }
    .unwrap()
    .to_owned()
}

pub fn log_auth_failure(ip_addr: &str) {
    warn!("Incorrect authentication attempt from {}", ip_addr);
}

#[derive(Template)]
#[template(path = "index.html")]
struct IndexPage {
    cachebuster: String,
    base_url: String,
}

#[get("/")]
async fn index(data: web::Data<AppData>) -> impl Responder {
    IndexPage {
        cachebuster: data.config.cachebuster.clone(),
        base_url: data.config.base_url.clone(),
    }
}

#[derive(Template)]
#[template(path = "admin/signed_out.html")]
struct SignedOutAdminPage {
    cachebuster: String,
    base_url: String,
    incorrect_password: bool,
}

#[derive(Template)]
#[template(path = "admin/signed_in.html")]
struct AdminPage<'a> {
    cachebuster: String,
    base_url: String,
    stored_files: &'a StoredFiles,
}

#[get("/admin")]
async fn admin_panel(
    data: web::Data<AppData>,
    session: Session,
) -> actix_web::Result<HttpResponse> {
    if let Some(true) = session.get::<bool>("admin")? {
        Ok(AdminPage {
            cachebuster: data.config.cachebuster.clone(),
            base_url: data.config.base_url.clone(),
            stored_files: data.state.read().await.deref(),
        }
        .to_response())
    } else {
        Ok(SignedOutAdminPage {
            cachebuster: data.config.cachebuster.clone(),
            base_url: data.config.base_url.clone(),
            incorrect_password: false,
        }
        .to_response())
    }
}

#[derive(Deserialize)]
struct AdminPanelSignin {
    password: String,
}

#[post("/admin")]
async fn admin_signin(
    req: HttpRequest,
    data: web::Data<AppData>,
    form: web::Form<AdminPanelSignin>,
    session: Session,
) -> actix_web::Result<HttpResponse> {
    if Argon2::default()
        .verify_password(
            form.password.as_bytes(),
            &data.config.admin_password_hash.password_hash(),
        )
        .is_ok()
    {
        session.insert("admin", true)?;
        Ok(AdminPage {
            cachebuster: data.config.cachebuster.clone(),
            base_url: data.config.base_url.clone(),
            stored_files: data.state.read().await.deref(),
        }
        .to_response())
    } else {
        let ip_addr = get_ip_addr(&req, data.config.reverse_proxy);
        log_auth_failure(&ip_addr);
        let mut resp = SignedOutAdminPage {
            cachebuster: data.config.cachebuster.clone(),
            base_url: data.config.base_url.clone(),
            incorrect_password: true,
        }
        .to_response();
        *resp.status_mut() = StatusCode::FORBIDDEN;
        Err(InternalError::from_response("Incorrect password", resp).into())
    }
}

#[derive(Deserialize)]
struct DownloadRequest {
    code: String,
    download: Option<download::DownloadSelection>,
}

#[derive(Template)]
#[template(path = "download.html")]
struct DownloadPage<'a> {
    info: DownloadInfo,
    cachebuster: &'a str,
    base_url: &'a str,
}

#[derive(Serialize)]
struct DownloadInfo {
    file: StoredFile,
    code: String,
    available: u64,
    offsets: Option<Vec<u64>>,
}

/// Serves either the download page or the stored file itself, depending on
/// whether the `download` query parameter is present.
#[get("/download")]
async fn handle_download(
    req: HttpRequest,
    query: web::Query<DownloadRequest>,
    data: web::Data<AppData>,
) -> actix_web::Result<HttpResponse> {
    let code = &query.code;
    if !store::is_valid_storage_code(code) {
        return not_found(req, data, true);
    }
    let store = data.state.read().await;
    let info = store.0.get(code);
    let info = if let Some(i) = info {
        i.file.clone()
    } else {
        return not_found(req, data, true);
    };
    let storage_path = data.config.storage_dir.join(code);
    let file = File::open(&storage_path).await?;
    if let Some(selection) = query.download {
        if let download::DownloadSelection::One(n) = selection {
            if let Some(ref files) = info.contents {
                if n >= files.files.len() {
                    return not_found(req, data, false);
                }
            } else {
                return not_found(req, data, false);
            }
        }
        Ok(download::DownloadingFile {
            file: file.into_std().await,
            storage_path,
            info,
            selection,
        }
        .into_response(&req))
    } else {
        let offsets = info.contents.as_ref().map(zip::file_data_offsets);
        Ok(DownloadPage {
            info: DownloadInfo {
                file: info,
                code: code.clone(),
                available: file.metadata().await?.len(),
                offsets,
            },
            cachebuster: &data.config.cachebuster,
            base_url: &data.config.base_url,
        }
        .to_response())
    }
}

#[derive(Deserialize)]
struct InfoQuery {
    code: String,
}

/// Returns JSON metadata for a stored upload.
#[get("/info")]
async fn download_info(
    req: HttpRequest,
    query: web::Query<InfoQuery>,
    data: web::Data<AppData>,
) -> actix_web::Result<web::Json<DownloadInfo>> {
    let code = &query.code;
    if !store::is_valid_storage_code(code) {
        return not_found(req, data, true);
    }
    let store = data.state.read().await;
    let info = store.0.get(code);
    let info = if let Some(i) = info {
        i.file.clone()
    } else {
        return not_found(req, data, true);
    };
    let storage_path = data.config.storage_dir.join(code);
    let offsets = info.contents.as_ref().map(zip::file_data_offsets);
    Ok(web::Json(DownloadInfo {
        file: info,
        code: code.clone(),
        available: File::open(&storage_path).await?.metadata().await?.len(),
        offsets,
    }))
}

#[derive(Template)]
#[template(path = "404.html")]
struct NotFoundPage<'a> {
    cachebuster: &'a str,
    base_url: &'a str,
}

/// Responds with the 404 page; when `report` is set, the request is also
/// logged as a failed access attempt.
fn not_found<T>(req: HttpRequest, data: web::Data<AppData>, report: bool) -> actix_web::Result<T> {
    if report {
        let ip_addr = get_ip_addr(&req, data.config.reverse_proxy);
        log_auth_failure(&ip_addr);
    }
    let mut resp = NotFoundPage {
        cachebuster: &data.config.cachebuster,
        base_url: &data.config.base_url,
    }
    .to_response();
    *resp.status_mut() = StatusCode::NOT_FOUND;
    Err(InternalError::from_response("Download not found", resp).into())
}

#[get("/upload")]
async fn handle_upload(
    req: HttpRequest,
    stream: web::Payload,
    data: web::Data<AppData>,
) -> actix_web::Result<HttpResponse> {
    if data.full().await? {
        return Ok(HttpResponse::BadRequest().finish());
    }
    let ip_addr = get_ip_addr(&req, data.config.reverse_proxy);
    ws::start(upload::Uploader::new(data, ip_addr), &req, stream)
}

#[derive(Deserialize)]
struct UploadPasswordCheck {
    password: String,
}

#[post("/upload/check_password")]
async fn check_upload_password(
    req: HttpRequest,
    body: web::Json<UploadPasswordCheck>,
    data: web::Data<AppData>,
) -> impl Responder {
    let ip_addr = get_ip_addr(&req, data.config.reverse_proxy);
    if body.password != data.config.upload_password {
        log_auth_failure(&ip_addr);
        HttpResponse::Forbidden().finish()
    } else {
        HttpResponse::NoContent().finish()
    }
}

#[derive(Serialize)]
struct UploadLimits {
    open: bool,
    max_size: u64,
    max_lifetime: u16,
}

#[get("/upload/limits.json")]
async fn upload_limits(data: web::Data<AppData>) -> actix_web::Result<web::Json<UploadLimits>> {
    let open = !data.full().await?;
    let available_size = data.available_size().await?;
    let max_size = std::cmp::min(available_size, data.config.max_upload_size);
    Ok(web::Json(UploadLimits {
        open,
        max_size,
        max_lifetime: data.config.max_lifetime,
    }))
}

/// Reads `var` from the environment, falling back to `default` when unset.
/// Panics if the value is set but fails to parse.
fn env_or<T: FromStr>(var: &str, default: T) -> T
where
    <T as FromStr>::Err: Debug,
{
    std::env::var(var)
        .map(|val| {
            val.parse::<T>()
                .unwrap_or_else(|_| panic!("Invalid value {} for variable {}", val, var))
        })
        .unwrap_or(default)
}

/// Like `env_or`, but the default is computed lazily.
fn env_or_else<T: FromStr, F: FnOnce() -> T>(var: &str, default: F) -> T
where
    <T as FromStr>::Err: Debug,
{
    std::env::var(var)
        .map(|val| {
            val.parse::<T>()
                .unwrap_or_else(|_| panic!("Invalid value {} for variable {}", val, var))
        })
        .ok()
        .unwrap_or_else(default)
}

/// Reads `var` from the environment, panicking if it is unset or unparseable.
fn env_or_panic<T: FromStr>(var: &str) -> T
where
    <T as FromStr>::Err: Debug,
{
    let val = std::env::var(var).unwrap_or_else(|_| panic!("{} must be set!", var));
    val.parse::<T>()
        .unwrap_or_else(|_| panic!("Invalid value {} for variable {}", val, var))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    dotenvy::dotenv().ok();
    env_logger::init();

    let static_dir: PathBuf = env_or_else("TRANSBEAM_STATIC_DIR", || PathBuf::from("static"));
    let storage_dir: PathBuf = env_or_else("TRANSBEAM_STORAGE_DIR", || PathBuf::from("storage"));
    let base_url: String = env_or_panic("TRANSBEAM_BASE_URL");
    let port: u16 = env_or("TRANSBEAM_PORT", 8080);
    let mnemonic_codes: bool = env_or("TRANSBEAM_MNEMONIC_CODES", true);
    let reverse_proxy: bool = env_or("TRANSBEAM_REVERSE_PROXY", true);
    let max_lifetime: u16 = env_or("TRANSBEAM_MAX_LIFETIME", 30);
    let max_upload_size: u64 =
        env_or::<ByteSize>("TRANSBEAM_MAX_UPLOAD_SIZE", ByteSize(16 * bytesize::GB)).as_u64();
    let max_storage_size: u64 =
        env_or::<ByteSize>("TRANSBEAM_MAX_STORAGE_SIZE", ByteSize(64 * bytesize::GB)).as_u64();
    let min_disk_free: u64 =
        env_or::<ByteSize>("TRANSBEAM_MIN_DISK_FREE", ByteSize(8 * bytesize::GB)).as_u64();
    let upload_password: String = env_or_panic("TRANSBEAM_UPLOAD_PASSWORD");
    let cachebuster: String = env_or_else("TRANSBEAM_CACHEBUSTER", String::new);
    let admin_password_hash: PasswordHashString = env_or_panic("TRANSBEAM_ADMIN_PASSWORD_HASH");
    let cookie_secret_base64: String = env_or_panic("TRANSBEAM_COOKIE_SECRET");
    let cookie_key =
        cookie::Key::from(&base64::decode(&cookie_secret_base64).unwrap_or_else(|_| {
            panic!(
                "Value {} for TRANSBEAM_COOKIE_SECRET is not valid base64",
                cookie_secret_base64
            )
        }));
    let state_file: PathBuf = match std::env::var("TRANSBEAM_STATE_FILE") {
        Ok(v) => v
            .parse()
            .unwrap_or_else(|_| panic!("Invalid value {} for variable TRANSBEAM_STATE_FILE", v)),
        Err(_) => {
            // Prefer a pre-existing state file inside the storage directory;
            // otherwise fall back to the default location.
            let legacy_state_file = storage_dir.join("files.json");
            if legacy_state_file.is_file() {
                legacy_state_file
            } else {
                PathBuf::from("transbeam.json")
            }
        }
    };

    let data = web::Data::new(AppData {
        state: StateDb::load(state_file)
            .await
            .expect("Failed to load state file"),
        config: Config {
            base_url,
            max_upload_size,
            max_storage_size,
            min_disk_free,
            max_lifetime,
            upload_password,
            storage_dir,
            reverse_proxy,
            mnemonic_codes,
            cachebuster,
            admin_password_hash,
        },
    });
    data.cleanup().await?;
    start_reaper(data.clone());

    let server = HttpServer::new(move || {
        App::new()
            .app_data(data.clone())
            .wrap(if data.config.reverse_proxy {
                Logger::new(r#"%{r}a "%r" %s %b "%{Referer}i" "%{User-Agent}i" %T"#)
            } else {
                Logger::default()
            })
            .wrap(SessionMiddleware::new(
                CookieSessionStore::default(),
                cookie_key.clone(),
            ))
            .service(index)
            .service(handle_download)
            .service(download_info)
            .service(handle_upload)
            .service(check_upload_password)
            .service(upload_limits)
            .service(admin_panel)
            .service(admin_signin)
            .service(actix_files::Files::new("/", static_dir.clone()))
    });
    if reverse_proxy {
        server
            .bind((std::net::Ipv4Addr::LOCALHOST, port))?
            .bind((std::net::Ipv6Addr::LOCALHOST, port))?
    } else {
        // Looks like this also picks up IPv4?
        // Binding 0.0.0.0 and :: on the same port fails with an error.
        server.bind((std::net::Ipv6Addr::UNSPECIFIED, port))?
    }
    .run()
    .await?;
    Ok(())
}

/// Spawns a background thread that removes expired uploads once a day.
fn start_reaper(data: web::Data<AppData>) {
    std::thread::spawn(move || {
        actix_web::rt::System::new().block_on(async {
            loop {
                actix_web::rt::time::sleep(core::time::Duration::from_secs(86400)).await;
                if let Err(e) = data.remove_expired_files().await {
                    error!("Error reaping expired files: {}", e);
                }
            }
        });
    });
}
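
// A minimal sketch of unit tests for the `env_or*` helpers defined above,
// included purely for illustration. The `TRANSBEAM_TEST_*` variable names are
// made up here and are not read anywhere else in transbeam.
#[cfg(test)]
mod tests {
    use super::env_or;

    #[test]
    fn env_or_uses_default_when_unset() {
        // With the variable absent, the provided default is returned.
        std::env::remove_var("TRANSBEAM_TEST_UNSET_PORT");
        assert_eq!(env_or::<u16>("TRANSBEAM_TEST_UNSET_PORT", 8080), 8080);
    }

    #[test]
    fn env_or_parses_a_set_value() {
        // A set, parseable value overrides the default.
        std::env::set_var("TRANSBEAM_TEST_PORT", "9000");
        assert_eq!(env_or::<u16>("TRANSBEAM_TEST_PORT", 8080), 9000);
    }
}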