added file expiration and fleshed out the API a bit

This commit is contained in:
xenofem 2022-04-30 01:38:26 -04:00
parent cc0aaaab94
commit f52aa0f08b
9 changed files with 296 additions and 103 deletions

53
API.md Normal file
View file

@ -0,0 +1,53 @@
# transbeam websocket api
- After opening the connection, the client sends an upload manifest to
the server. This is a JSON object containing the following keys:
- `files`: a list of metadata objects for all the files to be
uploaded, in the exact order they will be sent. This list must
contain at least 1 file and at most 256 files. Each file metadata
object has the following keys, all required:
- `name`: The name of the file. This will be sanitised on the
server side, but the sanitisation library isn't especially
restrictive; most Unicode code points will be allowed through
as-is.
- `size`: The exact size of the file, in bytes.
- `modtime`: The modification time of the file, as milliseconds
since the Unix epoch.
- `lifetime`: An integer number of days the files should be kept
for.
- Once the server receives the metadata, it will respond with a
JSON-encoded object containing at least the field `type`, and
possibly other fields as well. The types of message, and their
associated extra fields if any, are as follows:
- `ready`: The server will accept the upload and is ready to receive
data.
- `code`: A code string that can be used to download the files,
available immediately, even before the upload has completed.
- `too_big`: The upload is rejected because the total size of the
files is bigger than the server is willing to accept.
- `max_size`: The maximum total upload size the server will
accept. This is subject to change if the admin changes the
config, or if the server's storage space is filling up.
- `too_long`: The upload is rejected because the requested lifetime
is longer than the server will allow.
- `max_days`: The maximum number of days the client can request
files be kept for.
- `error`: A miscellaneous error has occurred.
- `details`: A string with more information about the error.
If the message type is anything other than `ready`, the connection
will be closed by the server.
- If the server is ready to receive files, the client begins sending
chunks of data from the files, as raw binary blobs. The client must
transmit each file's data in order from start to finish, and must
transmit the files in the same order they were listed in the
metadata. The size of the chunks isn't currently specified, and
it's fine for a chunk to span the end of one file and the start of
the next. After sending each chunk (that is, each complete
websocket message), the client must wait for the server to
acknowledge the chunk by sending back the string "ack", and then
send the next chunk if there is one. Once all chunks have been sent
and acknowledged, or once the server has sent a message other than
"ack" to indicate an error, the connection will be closed.

View file

@ -30,14 +30,14 @@ use actix_web::{
use actix_files::HttpRange; use actix_files::HttpRange;
use crate::DownloadableFile; use crate::store::StoredFile;
// This is copied substantially from actix-files, with some tweaks // This is copied substantially from actix-files, with some tweaks
pub(crate) struct DownloadingFile { pub(crate) struct DownloadingFile {
pub(crate) file: File, pub(crate) file: File,
pub(crate) storage_path: PathBuf, pub(crate) storage_path: PathBuf,
pub(crate) info: DownloadableFile, pub(crate) info: StoredFile,
} }
impl DownloadingFile { impl DownloadingFile {

View file

@ -1,5 +1,5 @@
mod download; mod download;
mod state; mod store;
mod upload; mod upload;
mod util; mod util;
mod zip; mod zip;
@ -10,42 +10,13 @@ use actix_web::{
get, middleware::Logger, web, App, HttpRequest, HttpResponse, HttpServer, Responder, get, middleware::Logger, web, App, HttpRequest, HttpResponse, HttpServer, Responder,
}; };
use actix_web_actors::ws; use actix_web_actors::ws;
use serde::{Deserialize, Serialize}; use log::error;
use state::PersistentState; use store::FileStore;
use time::OffsetDateTime;
use tokio::sync::RwLock; use tokio::sync::RwLock;
const APP_NAME: &str = "transbeam"; const APP_NAME: &str = "transbeam";
pub struct UploadedFile { type AppData = web::Data<RwLock<FileStore>>;
name: String,
size: u64,
modtime: OffsetDateTime,
}
impl UploadedFile {
fn new(name: &str, size: u64, modtime: OffsetDateTime) -> Self {
Self {
name: sanitise_file_name::sanitise(name),
size,
modtime,
}
}
}
#[derive(Clone, Deserialize, Serialize)]
pub struct DownloadableFile {
name: String,
size: u64,
#[serde(with = "state::timestamp")]
modtime: OffsetDateTime,
}
type AppData = web::Data<RwLock<PersistentState>>;
fn storage_dir() -> PathBuf {
PathBuf::from(std::env::var("STORAGE_DIR").unwrap_or_else(|_| String::from("storage")))
}
#[get("/download/{file_code}")] #[get("/download/{file_code}")]
async fn handle_download( async fn handle_download(
@ -60,7 +31,7 @@ async fn handle_download(
let data = data.read().await; let data = data.read().await;
let info = data.lookup_file(&file_code); let info = data.lookup_file(&file_code);
if let Some(info) = info { if let Some(info) = info {
let storage_path = storage_dir().join(file_code); let storage_path = store::storage_dir().join(file_code);
let file = File::open(&storage_path)?; let file = File::open(&storage_path)?;
Ok(download::DownloadingFile { Ok(download::DownloadingFile {
file, file,
@ -82,7 +53,8 @@ async fn handle_upload(req: HttpRequest, stream: web::Payload, data: AppData) ->
async fn main() -> std::io::Result<()> { async fn main() -> std::io::Result<()> {
env_logger::init(); env_logger::init();
let data: AppData = web::Data::new(RwLock::new(PersistentState::load().await?)); let data: AppData = web::Data::new(RwLock::new(FileStore::load().await?));
start_reaper(data.clone());
let static_dir = let static_dir =
PathBuf::from(std::env::var("STATIC_DIR").unwrap_or_else(|_| String::from("static"))); PathBuf::from(std::env::var("STATIC_DIR").unwrap_or_else(|_| String::from("static")));
@ -104,3 +76,16 @@ async fn main() -> std::io::Result<()> {
.await?; .await?;
Ok(()) Ok(())
} }
/// Spawns a background thread that periodically purges expired files
/// from the store. Runs for the lifetime of the process; a failed sweep
/// is logged and the loop continues.
fn start_reaper(data: AppData) {
    std::thread::spawn(move || {
        // Give the reaper its own single-threaded actix runtime so it can
        // drive the store's async methods independently of the server.
        actix_web::rt::System::new().block_on(async {
            // One day between sweeps; entries already expired at startup
            // are filtered out when the store is loaded, so the initial
            // delay is harmless.
            let sweep_interval = core::time::Duration::from_secs(60 * 60 * 24);
            loop {
                actix_web::rt::time::sleep(sweep_interval).await;
                if let Err(e) = data.write().await.remove_expired_files().await {
                    error!("Error reaping expired files: {}", e);
                }
            }
        });
    });
}

View file

@ -1,14 +1,50 @@
use std::{collections::HashMap, io::ErrorKind}; use std::{collections::HashMap, io::ErrorKind, path::PathBuf, str::FromStr};
use log::{debug, error, info, warn}; use log::{debug, error, info, warn};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use tokio::{ use tokio::{
fs::File, fs::File,
io::{AsyncReadExt, AsyncWriteExt}, io::{AsyncReadExt, AsyncWriteExt},
}; };
use crate::{storage_dir, DownloadableFile};
const STATE_FILE_NAME: &str = "files.json"; const STATE_FILE_NAME: &str = "files.json";
// Defaults used when the corresponding environment variable is unset
// or fails to parse.
const DEFAULT_STORAGE_DIR: &str = "storage";
const DEFAULT_MAX_LIFETIME: u32 = 30;
const GIGA: u64 = 1024 * 1024 * 1024;
const DEFAULT_MAX_SINGLE_SIZE: u64 = 16 * GIGA;
const DEFAULT_MAX_TOTAL_SIZE: u64 = 64 * GIGA;

/// Directory where uploaded files and the state file live, taken from
/// the STORAGE_DIR environment variable (default: "storage").
pub(crate) fn storage_dir() -> PathBuf {
    std::env::var("STORAGE_DIR")
        .map(PathBuf::from)
        .unwrap_or_else(|_| PathBuf::from(DEFAULT_STORAGE_DIR))
}

/// Reads and parses an environment variable, returning `default` when
/// the variable is unset or cannot be parsed as `T`.
fn parse_env_var<T: FromStr>(var: &str, default: T) -> T {
    match std::env::var(var) {
        Ok(val) => val.parse::<T>().unwrap_or(default),
        Err(_) => default,
    }
}

/// Maximum number of days a client may request files be kept
/// (TRANSBEAM_MAX_LIFETIME, default 30).
pub(crate) fn max_lifetime() -> u32 {
    parse_env_var("TRANSBEAM_MAX_LIFETIME", DEFAULT_MAX_LIFETIME)
}

/// Largest size in bytes accepted for a single stored file
/// (TRANSBEAM_MAX_SINGLE_FILE_SIZE, default 16 GiB).
pub(crate) fn max_single_size() -> u64 {
    parse_env_var("TRANSBEAM_MAX_SINGLE_FILE_SIZE", DEFAULT_MAX_SINGLE_SIZE)
}

/// Cap on the combined size in bytes of all stored files
/// (TRANSBEAM_MAX_TOTAL_FILE_SIZE, default 64 GiB).
pub(crate) fn max_total_size() -> u64 {
    parse_env_var("TRANSBEAM_MAX_TOTAL_FILE_SIZE", DEFAULT_MAX_TOTAL_SIZE)
}
/// Metadata for one downloadable file held in the store, persisted to
/// the JSON state file between restarts.
#[derive(Clone, Deserialize, Serialize)]
pub struct StoredFile {
    // Download filename presented to clients (sanitised or generated
    // server-side at upload time).
    pub name: String,
    // Exact size of the stored file in bytes.
    pub size: u64,
    // Modification time reported by the uploader.
    #[serde(with = "timestamp")]
    pub modtime: OffsetDateTime,
    // When the file should be removed; checked at load time and by the
    // periodic reaper.
    #[serde(with = "timestamp")]
    pub expiry: OffsetDateTime,
}
pub(crate) mod timestamp { pub(crate) mod timestamp {
use core::fmt; use core::fmt;
@ -51,11 +87,12 @@ pub(crate) mod timestamp {
} }
} }
async fn is_valid_entry(key: &str, info: &DownloadableFile) -> bool { async fn is_valid_entry(key: &str, info: &StoredFile) -> bool {
if !crate::util::is_ascii_alphanumeric(key) { if info.expiry < OffsetDateTime::now_utc() {
error!("Invalid key in persistent storage: {}", key); info!("File {} has expired", key);
return false; return false;
} }
let file = if let Ok(f) = File::open(storage_dir().join(&key)).await { let file = if let Ok(f) = File::open(storage_dir().join(&key)).await {
f f
} else { } else {
@ -81,24 +118,31 @@ async fn is_valid_entry(key: &str, info: &DownloadableFile) -> bool {
true true
} }
pub(crate) struct PersistentState(HashMap<String, DownloadableFile>); pub(crate) struct FileStore(HashMap<String, StoredFile>);
impl PersistentState { impl FileStore {
pub(crate) async fn load() -> std::io::Result<Self> { pub(crate) async fn load() -> std::io::Result<Self> {
let open_result = File::open(storage_dir().join(STATE_FILE_NAME)).await; let open_result = File::open(storage_dir().join(STATE_FILE_NAME)).await;
match open_result { match open_result {
Ok(mut f) => { Ok(mut f) => {
let mut buf = String::new(); let mut buf = String::new();
f.read_to_string(&mut buf).await?; f.read_to_string(&mut buf).await?;
let map: HashMap<String, DownloadableFile> = serde_json::from_str(&buf)?; let map: HashMap<String, StoredFile> = serde_json::from_str(&buf)?;
info!("Loaded {} file entries from persistent storage", map.len()); info!("Loaded {} file entries from persistent storage", map.len());
let mut filtered: HashMap<String, DownloadableFile> = HashMap::new(); let mut filtered: HashMap<String, StoredFile> = HashMap::new();
for (key, info) in map.into_iter() { for (key, info) in map.into_iter() {
// Handle this case separately, because we don't
// want to try to delete it if it's not the sort
// of path we're expecting
if !crate::util::is_ascii_alphanumeric(&key) {
error!("Invalid key in persistent storage: {}", key);
continue;
}
if is_valid_entry(&key, &info).await { if is_valid_entry(&key, &info).await {
filtered.insert(key, info); filtered.insert(key, info);
} else { } else {
info!("Deleting invalid file {}", key); info!("Deleting file {}", key);
if let Err(e) = tokio::fs::remove_file(storage_dir().join(&key)).await { if let Err(e) = tokio::fs::remove_file(storage_dir().join(&key)).await {
warn!("Failed to delete invalid file {}: {}", key, e); warn!("Failed to delete file {}: {}", key, e);
} }
} }
} }
@ -116,6 +160,10 @@ impl PersistentState {
} }
} }
/// Combined size in bytes of every file currently tracked by the store.
fn total_size(&self) -> u64 {
    self.0.values().map(|f| f.size).sum()
}
async fn save(&mut self) -> std::io::Result<()> { async fn save(&mut self) -> std::io::Result<()> {
info!("saving updated state: {} entries", self.0.len()); info!("saving updated state: {} entries", self.0.len());
File::create(storage_dir().join(STATE_FILE_NAME)) File::create(storage_dir().join(STATE_FILE_NAME))
@ -124,16 +172,22 @@ impl PersistentState {
.await .await
} }
/// Attempts to add a file to the store. Returns an I/O error if
/// something's broken, or a u64 of the maximum allowed file size
/// if the file was too big, or a unit if everything worked.
pub(crate) async fn add_file( pub(crate) async fn add_file(
&mut self, &mut self,
key: String, key: String,
file: DownloadableFile, file: StoredFile,
) -> std::io::Result<()> { ) -> std::io::Result<Result<(), u64>> {
let remaining_size = max_total_size().saturating_sub(self.total_size());
let allowed_size = std::cmp::min(remaining_size, max_single_size());
if file.size > allowed_size { return Ok(Err(allowed_size)); }
self.0.insert(key, file); self.0.insert(key, file);
self.save().await self.save().await.map(Ok)
} }
pub(crate) fn lookup_file(&self, key: &str) -> Option<DownloadableFile> { pub(crate) fn lookup_file(&self, key: &str) -> Option<StoredFile> {
self.0.get(key).cloned() self.0.get(key).cloned()
} }
@ -142,4 +196,20 @@ impl PersistentState {
self.0.remove(key); self.0.remove(key);
self.save().await self.save().await
} }
/// Deletes every file whose expiry time has passed, both from disk and
/// from the in-memory map, then persists the updated state.
///
/// A failed disk deletion is logged as a warning but the entry is still
/// dropped from the map, matching the load-time cleanup's behaviour.
pub(crate) async fn remove_expired_files(&mut self) -> std::io::Result<()> {
    info!("Checking for expired files");
    let now = OffsetDateTime::now_utc();
    // Take ownership of the map so entries can be moved while iterating;
    // unexpired entries are inserted straight back. `mem::take` is the
    // idiomatic form of `mem::replace(.., HashMap::new())`.
    for (key, file) in std::mem::take(&mut self.0).into_iter() {
        if file.expiry > now {
            self.0.insert(key, file);
        } else {
            info!("Deleting expired file {}", key);
            if let Err(e) = tokio::fs::remove_file(storage_dir().join(&key)).await {
                warn!("Failed to delete expired file {}: {}", key, e);
            }
        }
    }
    self.save().await
}
} }

View file

@ -6,10 +6,10 @@ use actix_web_actors::ws::{self, CloseCode};
use bytes::Bytes; use bytes::Bytes;
use log::{debug, error, info, trace}; use log::{debug, error, info, trace};
use rand::distributions::{Alphanumeric, DistString}; use rand::distributions::{Alphanumeric, DistString};
use serde::Deserialize; use serde::{Deserialize, Serialize};
use time::OffsetDateTime; use time::OffsetDateTime;
use crate::{storage_dir, DownloadableFile, UploadedFile}; use crate::store::{storage_dir, StoredFile, self};
const MAX_FILES: usize = 256; const MAX_FILES: usize = 256;
const FILENAME_DATE_FORMAT: &[time::format_description::FormatItem] = const FILENAME_DATE_FORMAT: &[time::format_description::FormatItem] =
@ -31,6 +31,10 @@ enum Error {
NoFiles, NoFiles,
#[error("Number of files submitted by client exceeded the maximum limit")] #[error("Number of files submitted by client exceeded the maximum limit")]
TooManyFiles, TooManyFiles,
#[error("Requested lifetime was too long")]
TooLong,
#[error("Upload size was too large, can be at most {0} bytes")]
TooBig(u64),
#[error("Websocket was closed by client before completing transfer")] #[error("Websocket was closed by client before completing transfer")]
ClosedEarly(Option<CloseReason>), ClosedEarly(Option<CloseReason>),
#[error("Client sent more data than they were supposed to")] #[error("Client sent more data than they were supposed to")]
@ -40,15 +44,17 @@ enum Error {
impl Error { impl Error {
fn close_code(&self) -> CloseCode { fn close_code(&self) -> CloseCode {
match self { match self {
Self::Parse(_) => CloseCode::Invalid, Self::Storage(_)
Self::Storage(_) => CloseCode::Error, | Self::TimeFormat(_) => CloseCode::Error,
Self::TimeFormat(_) => CloseCode::Error, Self::Parse(_)
Self::DuplicateFilename => CloseCode::Policy, | Self::UnexpectedMessageType
Self::UnexpectedMessageType => CloseCode::Invalid, | Self::ClosedEarly(_)
Self::NoFiles => CloseCode::Policy, | Self::UnexpectedExtraData => CloseCode::Invalid,
Self::TooManyFiles => CloseCode::Policy, Self::DuplicateFilename
Self::ClosedEarly(_) => CloseCode::Invalid, | Self::NoFiles
Self::UnexpectedExtraData => CloseCode::Invalid, | Self::TooManyFiles
| Self::TooLong
| Self::TooBig(_) => CloseCode::Policy,
} }
} }
} }
@ -64,7 +70,7 @@ impl Uploader {
pub(crate) fn new(app_data: super::AppData) -> Self { pub(crate) fn new(app_data: super::AppData) -> Self {
Self { Self {
writer: None, writer: None,
storage_filename: String::new(), storage_filename: Alphanumeric.sample_string(&mut rand::thread_rng(), 8),
app_data, app_data,
bytes_remaining: 0, bytes_remaining: 0,
} }
@ -75,6 +81,22 @@ impl Actor for Uploader {
type Context = ws::WebsocketContext<Self>; type Context = ws::WebsocketContext<Self>;
} }
/// A single file as described by the client in the upload manifest,
/// with its name already sanitised for storage.
pub struct UploadedFile {
    pub name: String,
    pub size: u64,
    pub modtime: OffsetDateTime,
}

impl UploadedFile {
    /// Builds an UploadedFile, sanitising the client-supplied name
    /// before it is used anywhere server-side.
    fn new(name: &str, size: u64, modtime: OffsetDateTime) -> Self {
        Self {
            name: sanitise_file_name::sanitise(name),
            size,
            modtime,
        }
    }
}
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
struct RawUploadedFile { struct RawUploadedFile {
name: String, name: String,
@ -93,6 +115,31 @@ impl RawUploadedFile {
} }
} }
/// The initial JSON message the client sends over the websocket: the
/// list of files to be uploaded plus the requested retention period.
#[derive(Deserialize)]
struct UploadManifest {
    files: Vec<RawUploadedFile>,
    // Requested number of days to keep the files for.
    lifetime: u32,
}

/// JSON messages sent to the client, serialised with a `type` tag
/// (e.g. {"type": "ready", "code": "..."}) per the websocket API.
#[derive(Serialize)]
#[serde(rename_all = "snake_case", tag = "type")]
enum ServerMessage {
    Ready { code: String },
    TooBig { max_size: u64 },
    TooLong { max_days: u32 },
    Error { details: String },
}
/// Maps internal errors to the message sent to the client before the
/// connection is closed, attaching the relevant limit for policy errors.
impl From<&Error> for ServerMessage {
    fn from(e: &Error) -> Self {
        match e {
            Error::TooBig(max_size) => ServerMessage::TooBig { max_size: *max_size },
            Error::TooLong => ServerMessage::TooLong { max_days: store::max_lifetime() },
            // Everything else is reported verbatim via its Display text.
            _ => ServerMessage::Error { details: e.to_string() },
        }
    }
}
fn stop_and_flush<T>(_: T, u: &mut Uploader, ctx: &mut <Uploader as Actor>::Context) { fn stop_and_flush<T>(_: T, u: &mut Uploader, ctx: &mut <Uploader as Actor>::Context) {
ctx.stop(); ctx.stop();
if let Some(w) = u.writer.as_mut() { if let Some(w) = u.writer.as_mut() {
@ -115,12 +162,7 @@ impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for Uploader {
match self.handle_message(msg, ctx) { match self.handle_message(msg, ctx) {
Err(e) => { Err(e) => {
error!("{}", e); self.notify_error_and_cleanup(e, ctx);
ctx.close(Some(ws::CloseReason {
code: e.close_code(),
description: Some(e.to_string()),
}));
self.cleanup_after_error(ctx);
} }
Ok(true) => { Ok(true) => {
info!("Finished uploading data"); info!("Finished uploading data");
@ -140,6 +182,16 @@ fn ack(ctx: &mut <Uploader as Actor>::Context) {
} }
impl Uploader { impl Uploader {
/// Logs the error, reports it to the client as a JSON ServerMessage,
/// closes the websocket with the error's close code, and then runs the
/// uploader's post-error cleanup.
fn notify_error_and_cleanup(&mut self, e: Error, ctx: &mut <Self as Actor>::Context) {
    error!("{}", e);
    // ServerMessage serialisation cannot fail for these variants, so
    // unwrap here is an invariant check rather than error handling.
    ctx.text(serde_json::to_string(&ServerMessage::from(&e)).unwrap());
    ctx.close(Some(ws::CloseReason {
        code: e.close_code(),
        description: Some(e.to_string()),
    }));
    self.cleanup_after_error(ctx);
}
fn handle_message( fn handle_message(
&mut self, &mut self,
msg: ws::Message, msg: ws::Message,
@ -151,12 +203,18 @@ impl Uploader {
if self.writer.is_some() { if self.writer.is_some() {
return Err(Error::UnexpectedMessageType); return Err(Error::UnexpectedMessageType);
} }
let raw_files: Vec<RawUploadedFile> = serde_json::from_slice(text.as_bytes())?; let UploadManifest { files: raw_files, lifetime, } = serde_json::from_slice(text.as_bytes())?;
if lifetime > store::max_lifetime() {
return Err(Error::TooLong);
}
info!("Received file list: {} files", raw_files.len()); info!("Received file list: {} files", raw_files.len());
debug!("{:?}", raw_files); debug!("{:?}", raw_files);
if raw_files.len() > MAX_FILES { if raw_files.len() > MAX_FILES {
return Err(Error::TooManyFiles); return Err(Error::TooManyFiles);
} }
if raw_files.is_empty() {
return Err(Error::NoFiles);
}
let mut filenames: HashSet<String> = HashSet::new(); let mut filenames: HashSet<String> = HashSet::new();
let mut files = Vec::new(); let mut files = Vec::new();
for raw_file in raw_files.iter() { for raw_file in raw_files.iter() {
@ -172,18 +230,13 @@ impl Uploader {
self.bytes_remaining += file.size; self.bytes_remaining += file.size;
files.push(file); files.push(file);
} }
if files.is_empty() { let storage_path = storage_dir().join(self.storage_filename.clone());
return Err(Error::NoFiles);
}
let storage_filename = Alphanumeric.sample_string(&mut rand::thread_rng(), 8);
self.storage_filename = storage_filename.clone();
let storage_path = storage_dir().join(storage_filename.clone());
info!("storing to: {:?}", storage_path); info!("storing to: {:?}", storage_path);
let writer = File::options() let writer = File::options()
.write(true) .write(true)
.create_new(true) .create_new(true)
.open(&storage_path)?; .open(&storage_path)?;
let (writer, downloadable_file): (Box<dyn Write>, _) = if files.len() > 1 { let (writer, name, size, modtime): (Box<dyn Write>,_,_,_) = if files.len() > 1 {
info!("Wrapping in zipfile generator"); info!("Wrapping in zipfile generator");
let now = OffsetDateTime::now_utc(); let now = OffsetDateTime::now_utc();
let zip_writer = super::zip::ZipGenerator::new(files, writer); let zip_writer = super::zip::ZipGenerator::new(files, writer);
@ -192,33 +245,40 @@ impl Uploader {
super::APP_NAME.to_owned() + &now.format(FILENAME_DATE_FORMAT)? + ".zip"; super::APP_NAME.to_owned() + &now.format(FILENAME_DATE_FORMAT)? + ".zip";
( (
Box::new(zip_writer), Box::new(zip_writer),
DownloadableFile { download_filename,
name: download_filename,
size, size,
modtime: now, now,
},
) )
} else { } else {
( (
Box::new(writer), Box::new(writer),
DownloadableFile { files[0].name.clone(),
name: files[0].name.clone(), files[0].size,
size: files[0].size, files[0].modtime,
modtime: files[0].modtime,
},
) )
}; };
self.writer = Some(writer); self.writer = Some(writer);
let stored_file = StoredFile {
name,
size,
modtime,
expiry: OffsetDateTime::now_utc() + lifetime*time::Duration::DAY,
};
let data = self.app_data.clone(); let data = self.app_data.clone();
let storage_filename = self.storage_filename.clone();
ctx.spawn(actix::fut::wrap_future(async move { ctx.spawn(actix::fut::wrap_future(async move {
debug!("Spawned future to add entry {} to state", storage_filename); debug!("Spawned future to add entry {} to state", storage_filename);
data.write() data.write()
.await .await
.add_file(storage_filename, downloadable_file) .add_file(storage_filename, stored_file)
.await .await
.unwrap(); }).map(|res, u: &mut Self, ctx: &mut <Self as Actor>::Context| {
match res {
Ok(Ok(())) => ctx.text(serde_json::to_string(&ServerMessage::Ready { code: u.storage_filename.clone() }).unwrap()),
Ok(Err(size)) => u.notify_error_and_cleanup(Error::TooBig(size), ctx),
Err(e) => u.notify_error_and_cleanup(Error::from(e), ctx)
}
})); }));
ctx.text(self.storage_filename.as_str());
} }
ws::Message::Binary(data) | ws::Message::Continuation(Item::Last(data)) => { ws::Message::Binary(data) | ws::Message::Continuation(Item::Last(data)) => {
let result = self.handle_data(data)?; let result = self.handle_data(data)?;

View file

@ -4,7 +4,7 @@ use crc32fast::Hasher;
use log::debug; use log::debug;
use time::OffsetDateTime; use time::OffsetDateTime;
use crate::UploadedFile; use crate::upload::UploadedFile;
const SIGNATURE_SIZE: u64 = 4; const SIGNATURE_SIZE: u64 = 4;
const SHARED_FIELDS_SIZE: u64 = 26; const SHARED_FIELDS_SIZE: u64 = 26;

View file

@ -18,6 +18,17 @@
<noscript>This page requires Javascript :(</noscript> <noscript>This page requires Javascript :(</noscript>
<button id="upload">Upload</button> <button id="upload">Upload</button>
<div id="lifetime_container" style="display: none;">
<label>
Keep files for:
<select id="lifetime">
<option value="1">1 day</option>
<option value="7">1 week</option>
<option value="14" selected>2 weeks</option>
<option value="30">1 month</option>
</select>
</label>
</div>
<div id="download_link_container" style="display: none;"> <div id="download_link_container" style="display: none;">
<div id="download_link_main"> <div id="download_link_main">
<div>Download link: <span id="download_link"></span></div><div class="copy_button"></div> <div>Download link: <span id="download_link"></span></div><div class="copy_button"></div>

View file

@ -133,6 +133,10 @@ button:disabled, input:disabled + .fake_button {
cursor: not-allowed; cursor: not-allowed;
} }
#lifetime_container {
margin-top: 10px;
}
#footer { #footer {
margin-top: 30px; margin-top: 30px;
} }

View file

@ -8,6 +8,9 @@ const fileInputMessage = document.getElementById('file_input_message');
const fileListContainer = document.getElementById('file_list_container'); const fileListContainer = document.getElementById('file_list_container');
const fileList = document.getElementById('file_list'); const fileList = document.getElementById('file_list');
const lifetimeContainer = document.getElementById('lifetime_container');
const lifetimeInput = document.getElementById('lifetime');
const uploadButton = document.getElementById('upload'); const uploadButton = document.getElementById('upload');
const downloadLinkContainer = document.getElementById('download_link_container'); const downloadLinkContainer = document.getElementById('download_link_container');
@ -27,13 +30,13 @@ let byteIndex = 0;
let bytesSent = 0; let bytesSent = 0;
let totalBytes = 0; let totalBytes = 0;
function sendMetadata() { function sendManifest(lifetime) {
const metadata = files.map((file) => ({ const fileMetadata = files.map((file) => ({
name: file.name, name: file.name,
size: file.size, size: file.size,
modtime: file.lastModified, modtime: file.lastModified,
})); }));
socket.send(JSON.stringify(metadata)); socket.send(JSON.stringify({ lifetime, files: fileMetadata }));
} }
function finishSending() { function finishSending() {
@ -95,11 +98,13 @@ function updateFiles() {
fileInputMessage.textContent = 'Select files to upload...'; fileInputMessage.textContent = 'Select files to upload...';
fileListContainer.style.display = 'none'; fileListContainer.style.display = 'none';
uploadButton.style.display = 'none'; uploadButton.style.display = 'none';
lifetimeContainer.style.display = 'none';
} else { } else {
fileInputMessage.textContent = 'Select more files to upload...'; fileInputMessage.textContent = 'Select more files to upload...';
fileListContainer.style.display = ''; fileListContainer.style.display = '';
uploadButton.textContent = `Upload ${files.length} file${files.length > 1 ? 's' : ''} (${displaySize(totalBytes)})`; uploadButton.textContent = `Upload ${files.length} file${files.length > 1 ? 's' : ''} (${displaySize(totalBytes)})`;
uploadButton.style.display = ''; uploadButton.style.display = '';
lifetimeContainer.style.display = '';
} }
fileInput.disabled = (files.length >= MAX_FILES); fileInput.disabled = (files.length >= MAX_FILES);
} }
@ -156,20 +161,25 @@ fileInput.addEventListener('input', (e) => {
uploadButton.addEventListener('click', (e) => { uploadButton.addEventListener('click', (e) => {
if (files.length === 0) { return; } if (files.length === 0) { return; }
const lifetime = parseInt(lifetimeInput.value);
lifetimeContainer.remove();
fileInputContainer.remove(); fileInputContainer.remove();
for (const button of Array.from(document.getElementsByTagName('button')).concat(...document.getElementsByClassName('file_delete'))) { for (const button of Array.from(document.getElementsByTagName('button')).concat(...document.getElementsByClassName('file_delete'))) {
button.remove(); button.remove();
} }
socket = new WebSocket(`${window.location.protocol === 'http:' ? 'ws' : 'wss'}://${window.location.host}/upload`); socket = new WebSocket(`${window.location.protocol === 'http:' ? 'ws' : 'wss'}://${window.location.host}/upload`);
socket.addEventListener('open', sendMetadata); socket.addEventListener('open', () => sendManifest(lifetime));
socket.addEventListener('message', (msg) => { socket.addEventListener('message', (msg) => {
if (bytesSent === 0 && msg.data.match(/^[A-Za-z0-9]+$/)) { if (bytesSent === 0) {
downloadLink.textContent = `${window.location.origin}/download/${msg.data}`; const reply = JSON.parse(msg.data);
if (reply.type === 'ready' && reply.code.match(/^[A-Za-z0-9]+$/)) {
downloadLink.textContent = `${window.location.origin}/download/${reply.code}`;
downloadLinkContainer.style.display = ''; downloadLinkContainer.style.display = '';
updateProgress(); updateProgress();
progressContainer.style.display = ''; progressContainer.style.display = '';
sendData(); sendData();
}
} else if (msg.data === 'ack') { } else if (msg.data === 'ack') {
sendData(); sendData();
} }