allow downloading individual files from bundle
parent 43d03869ab · commit 007289ffe5
15 changed files with 499 additions and 69 deletions
src/download.rs (113 changes)

@@ -1,3 +1,4 @@
+use core::fmt;
 use std::{
     cmp,
     fs::File,
@@ -14,6 +15,8 @@ use futures_core::{ready, Stream};
 use inotify::{Inotify, WatchMask};
 use log::trace;
 use pin_project_lite::pin_project;
+use serde::{de, Deserialize, Deserializer};
+use time::OffsetDateTime;
 use std::{os::unix::fs::MetadataExt, time::SystemTime};

 use actix_web::{
@@ -30,7 +33,55 @@ use actix_web::{

 use actix_files::HttpRange;

-use crate::store::StoredFile;
+use crate::{store::StoredFile, upload::UploadedFile};

+#[derive(Clone, Copy)]
+pub enum DownloadSelection {
+    One(usize),
+    All,
+}
+
+impl fmt::Display for DownloadSelection {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            DownloadSelection::All => write!(f, "all"),
+            DownloadSelection::One(n) => n.fmt(f),
+        }
+    }
+}
+
+struct SelectionVisitor;
+
+impl<'de> de::Visitor<'de> for SelectionVisitor {
+    type Value = DownloadSelection;
+
+    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
+        write!(formatter, r#"a nonnegative integer or the string "all""#)
+    }
+
+    fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E> {
+        Ok(DownloadSelection::One(v as usize))
+    }
+
+    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
+    where E: de::Error {
+        if v == "all" {
+            Ok(DownloadSelection::All)
+        } else if let Ok(n) = v.parse::<usize>() {
+            Ok(DownloadSelection::One(n))
+        } else {
+            Err(de::Error::invalid_value(de::Unexpected::Str(v), &self))
+        }
+    }
+}
+
+impl<'de> Deserialize<'de> for DownloadSelection {
+    fn deserialize<D: Deserializer<'de>>(
+        de: D,
+    ) -> Result<DownloadSelection, D::Error> {
+        de.deserialize_any(SelectionVisitor)
+    }
+}
+
 // This is copied substantially from actix-files, with some tweaks

@@ -38,24 +89,64 @@ pub(crate) struct DownloadingFile {
     pub(crate) file: File,
     pub(crate) storage_path: PathBuf,
     pub(crate) info: StoredFile,
+    pub(crate) selection: DownloadSelection,
 }

 impl DownloadingFile {
+    fn selected(&self) -> Option<&UploadedFile> {
+        match self.selection {
+            DownloadSelection::All => None,
+            DownloadSelection::One(n) => Some(self.info.contents.as_ref()?.get(n)?),
+        }
+    }
+
+    fn name(&self) -> &str {
+        match self.selected() {
+            None => &self.info.name,
+            Some(f) => &f.name,
+        }
+    }
+
+    fn size(&self) -> u64 {
+        match self.selected() {
+            None => self.info.size,
+            Some(f) => f.size,
+        }
+    }
+
+    fn modtime(&self) -> OffsetDateTime {
+        match self.selected() {
+            None => self.info.modtime,
+            Some(f) => f.modtime,
+        }
+    }
+
+    fn baseline_offset(&self) -> u64 {
+        if let (DownloadSelection::One(n), Some(files)) = (self.selection, self.info.contents.as_ref()) {
+            crate::zip::file_data_offset(&files, n)
+        } else {
+            0
+        }
+    }
+
     fn etag(&self) -> EntityTag {
         let ino = self.file.metadata().map(|md| md.ino()).unwrap_or_default();
+        let modtime = self.modtime();
         EntityTag::new_strong(format!(
-            "{:x}:{:x}:{:x}:{:x}",
+            "{:x}:{}:{:x}:{:x}:{:x}",
             ino,
-            self.info.size,
-            self.info.modtime.unix_timestamp() as u64,
-            self.info.modtime.nanosecond(),
+            self.selection,
+            self.size(),
+            modtime.unix_timestamp() as u64,
+            modtime.nanosecond(),
         ))
     }

     /// Creates an `HttpResponse` with file as a streaming body.
     pub fn into_response(self, req: &HttpRequest) -> HttpResponse<BoxBody> {
+        let total_size = self.size();
         let etag = self.etag();
-        let last_modified = HttpDate::from(SystemTime::from(self.info.modtime));
+        let last_modified = HttpDate::from(SystemTime::from(self.modtime()));

         let precondition_failed = precondition_failed(req, &etag, &last_modified);
         let not_modified = not_modified(req, &etag, &last_modified);
@@ -68,14 +159,14 @@ impl DownloadingFile {
             header::CONTENT_DISPOSITION,
             ContentDisposition {
                 disposition: DispositionType::Attachment,
-                parameters: vec![DispositionParam::Filename(self.info.name)],
+                parameters: vec![DispositionParam::Filename(self.name().to_string())],
             },
         ));
         res.insert_header((header::LAST_MODIFIED, last_modified));
         res.insert_header((header::ETAG, etag));
         res.insert_header((header::ACCEPT_RANGES, "bytes"));

-        let mut length = self.info.size;
+        let mut length = total_size;
         let mut offset = 0;

         // check for range header
@@ -97,7 +188,7 @@ impl DownloadingFile {
                         "bytes {}-{}/{}",
                         offset,
                         offset + length - 1,
-                        self.info.size
+                        total_size,
                     ),
                 ));
             } else {
@@ -118,9 +209,9 @@ impl DownloadingFile {
                 .map_into_boxed_body();
         }

-        let reader = new_live_reader(length, offset, self.file, self.storage_path);
+        let reader = new_live_reader(length, self.baseline_offset() + offset, self.file, self.storage_path);

-        if offset != 0 || length != self.info.size {
+        if offset != 0 || length != total_size {
             res.status(StatusCode::PARTIAL_CONTENT);
         }
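A quick sanity check of the DownloadSelection deserializer above (not part of the commit; Params is a hypothetical stand-in for the DownloadRequest query struct in src/main.rs). serde_json is used only because it exercises both visitor paths; a URL query string always arrives as text, which is why visit_str also accepts numeric strings:

    use serde::Deserialize;

    #[derive(Deserialize)]
    struct Params {
        download: DownloadSelection,
    }

    fn main() {
        // A JSON integer goes through visit_u64.
        let p: Params = serde_json::from_str(r#"{"download": 2}"#).unwrap();
        assert!(matches!(p.download, DownloadSelection::One(2)));

        // A string goes through visit_str: "all" selects the whole bundle...
        let p: Params = serde_json::from_str(r#"{"download": "all"}"#).unwrap();
        assert!(matches!(p.download, DownloadSelection::All));

        // ...and a numeric string still parses, matching query-string behavior.
        let p: Params = serde_json::from_str(r#"{"download": "7"}"#).unwrap();
        assert!(matches!(p.download, DownloadSelection::One(7)));
    }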
src/main.rs (61 changes)

@@ -1,9 +1,10 @@
 mod download;
 mod store;
+mod timestamp;
 mod upload;
 mod zip;

-use std::{fmt::Debug, fs::File, path::PathBuf, str::FromStr};
+use std::{fmt::Debug, path::PathBuf, str::FromStr};

 use actix_files::NamedFile;
 use actix_web::{
@@ -11,11 +12,12 @@ use actix_web::{
     HttpServer, Responder,
 };
 use actix_web_actors::ws;
+use askama::Template;
 use bytesize::ByteSize;
 use log::{error, warn};
 use serde::{Deserialize, Serialize};
-use store::FileStore;
-use tokio::sync::RwLock;
+use store::{FileStore, StoredFile};
+use tokio::{fs::File, sync::RwLock};

 const APP_NAME: &str = "transbeam";

@@ -49,9 +51,19 @@ pub fn log_auth_failure(ip_addr: &str) {
     warn!("Incorrect authentication attempt from {}", ip_addr);
 }

 #[derive(Deserialize)]
 struct DownloadRequest {
     code: String,
+    download: Option<download::DownloadSelection>,
+}
+
+#[derive(Template)]
+#[template(path = "download.html")]
+struct DownloadInfo<'a> {
+    file: StoredFile,
+    code: &'a str,
+    available: u64,
 }

 #[get("/download")]
@@ -62,29 +74,52 @@ async fn handle_download(
 ) -> actix_web::Result<HttpResponse> {
     let code = &download.code;
     if !store::is_valid_storage_code(code) {
-        return download_not_found(req, data);
+        return not_found(req, data, true);
     }
     let info = data.file_store.read().await.lookup_file(code);
-    if let Some(info) = info {
-        let storage_path = data.config.storage_dir.join(code);
-        let file = File::open(&storage_path)?;
+    let info = if let Some(i) = info {
+        i
+    } else {
+        return not_found(req, data, true)
+    };
+
+    let storage_path = data.config.storage_dir.join(code);
+    let file = File::open(&storage_path).await?;
+    if let Some(selection) = download.download {
+        if let download::DownloadSelection::One(n) = selection {
+            if let Some(ref files) = info.contents {
+                if n >= files.len() {
+                    return not_found(req, data, false);
+                }
+            } else {
+                return not_found(req, data, false);
+            }
+        }
         Ok(download::DownloadingFile {
-            file,
+            file: file.into_std().await,
             storage_path,
             info,
+            selection,
         }
         .into_response(&req))
     } else {
-        download_not_found(req, data)
+        Ok(HttpResponse::Ok().body(DownloadInfo {
+            file: info,
+            code,
+            available: file.metadata().await?.len(),
+        }.render().unwrap()))
     }
 }

-fn download_not_found(
+fn not_found(
     req: HttpRequest,
     data: web::Data<AppState>,
+    report: bool,
 ) -> actix_web::Result<HttpResponse> {
-    let ip_addr = get_ip_addr(&req, data.config.reverse_proxy);
-    log_auth_failure(&ip_addr);
+    if report {
+        let ip_addr = get_ip_addr(&req, data.config.reverse_proxy);
+        log_auth_failure(&ip_addr);
+    }
     Ok(NamedFile::open(data.config.static_dir.join("404.html"))?
         .set_status_code(StatusCode::NOT_FOUND)
         .into_response(&req))
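Taken together, the handler now serves three cases from one endpoint: /download?code=... with no download parameter renders the download.html info page, download=all streams the whole bundle, and download=N (zero-based, bounds-checked against info.contents) streams the Nth file inside it. The file is opened as a tokio async File so metadata() can be awaited for the info page, then converted with into_std() for the blocking streaming reader. Note that only the invalid-code paths log an auth failure (report: true); a bad index on a valid code does not (report: false), since the caller has already presented a valid code at that point.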
src/store.rs (50 changes)

@@ -10,12 +10,15 @@ use rand::{
     thread_rng, Rng,
 };
 use serde::{Deserialize, Serialize};
+use serde_with::skip_serializing_none;
 use time::OffsetDateTime;
 use tokio::{
     fs::File,
     io::{AsyncReadExt, AsyncWriteExt},
 };

+use crate::upload::UploadedFile;
+
 const STATE_FILE_NAME: &str = "files.json";
 const MAX_STORAGE_FILES: usize = 1024;
@@ -33,55 +36,16 @@ pub fn is_valid_storage_code(s: &str) -> bool {
         .all(|c| c.is_ascii_alphanumeric() || c == &b'-')
 }

+#[skip_serializing_none]
 #[derive(Clone, Deserialize, Serialize)]
 pub struct StoredFile {
     pub name: String,
     pub size: u64,
-    #[serde(with = "timestamp")]
+    #[serde(with = "crate::timestamp")]
     pub modtime: OffsetDateTime,
-    #[serde(with = "timestamp")]
+    #[serde(with = "crate::timestamp")]
     pub expiry: OffsetDateTime,
-}
-
-pub(crate) mod timestamp {
-    use core::fmt;
-
-    use serde::{de::Visitor, Deserializer, Serializer};
-    use time::OffsetDateTime;
-
-    pub(crate) fn serialize<S: Serializer>(
-        time: &OffsetDateTime,
-        ser: S,
-    ) -> Result<S::Ok, S::Error> {
-        ser.serialize_i64(time.unix_timestamp())
-    }
-
-    struct I64Visitor;
-
-    impl<'de> Visitor<'de> for I64Visitor {
-        type Value = i64;
-
-        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
-            write!(formatter, "an integer")
-        }
-
-        fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E> {
-            Ok(v)
-        }
-
-        fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E> {
-            Ok(v as i64)
-        }
-    }
-
-    pub(crate) fn deserialize<'de, D: Deserializer<'de>>(
-        de: D,
-    ) -> Result<OffsetDateTime, D::Error> {
-        Ok(
-            OffsetDateTime::from_unix_timestamp(de.deserialize_i64(I64Visitor)?)
-                .unwrap_or_else(|_| OffsetDateTime::now_utc()),
-        )
-    }
-}
+    pub contents: Option<Vec<UploadedFile>>,
 }

 async fn is_valid_entry(key: &str, info: &StoredFile, storage_dir: &Path) -> bool {
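Because serde treats a missing Option field as None and #[skip_serializing_none] omits None on the way out, existing single-file entries in files.json keep working unchanged. A hypothetical bundle entry (all values invented for illustration) would now look like:

    {
        "name": "transbeam-20220501.zip",
        "size": 2048,
        "modtime": 1651406400,
        "expiry": 1652011200,
        "contents": [
            { "name": "notes.txt", "size": 900, "modtime": 1651406400 },
            { "name": "photo.png", "size": 800, "modtime": 1651320000 }
        ]
    }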
src/timestamp.rs (new file, 38 lines)

@@ -0,0 +1,38 @@
+use core::fmt;
+
+use serde::{de::Visitor, Deserializer, Serializer};
+use time::OffsetDateTime;
+
+pub(crate) fn serialize<S: Serializer>(
+    time: &OffsetDateTime,
+    ser: S,
+) -> Result<S::Ok, S::Error> {
+    ser.serialize_i64(time.unix_timestamp())
+}
+
+struct I64Visitor;
+
+impl<'de> Visitor<'de> for I64Visitor {
+    type Value = i64;
+
+    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        write!(formatter, "an integer")
+    }
+
+    fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E> {
+        Ok(v)
+    }
+
+    fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E> {
+        Ok(v as i64)
+    }
+}
+
+pub(crate) fn deserialize<'de, D: Deserializer<'de>>(
+    de: D,
+) -> Result<OffsetDateTime, D::Error> {
+    Ok(
+        OffsetDateTime::from_unix_timestamp(de.deserialize_i64(I64Visitor)?)
+            .unwrap_or_else(|_| OffsetDateTime::now_utc()),
+    )
+}
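The module is unchanged apart from moving out of store.rs, so that StoredFile and UploadedFile can both use it; it serializes an OffsetDateTime as bare unix seconds. A minimal sketch of the round trip (the wrapper struct is hypothetical):

    #[derive(serde::Serialize, serde::Deserialize)]
    struct Stamped {
        #[serde(with = "crate::timestamp")]
        t: time::OffsetDateTime,
    }

    // serde_json::to_string of Stamped { t: OffsetDateTime::UNIX_EPOCH }
    // yields {"t":0}; parsing that back returns the epoch, and an
    // out-of-range timestamp falls back to now_utc() instead of erroring.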
src/upload.rs

@@ -93,9 +93,11 @@ impl Actor for Uploader {
     type Context = <Uploader as Actor>::Context;

+#[derive(Clone, Deserialize, Serialize)]
 pub struct UploadedFile {
     pub name: String,
     pub size: u64,
+    #[serde(with = "crate::timestamp")]
     pub modtime: OffsetDateTime,
 }
@@ -273,7 +275,7 @@ impl Uploader {
         let (writer, name, size, modtime): (Box<dyn Write>, _, _, _) = if files.len() > 1 {
             info!("Wrapping in zipfile generator");
             let now = OffsetDateTime::now_utc();
-            let zip_writer = super::zip::ZipGenerator::new(files, writer);
+            let zip_writer = super::zip::ZipGenerator::new(files.clone(), writer);
             let size = zip_writer.total_size();
             let download_filename = super::APP_NAME.to_owned()
                 + &now.format(FILENAME_DATE_FORMAT).unwrap()
@@ -293,6 +295,7 @@ impl Uploader {
             size,
             modtime,
             expiry: OffsetDateTime::now_utc() + lifetime * time::Duration::DAY,
+            contents: if files.len() > 1 { Some(files) } else { None },
         };
         let state = self.app_state.clone();
         let storage_filename = self.storage_filename.clone();
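The new .clone() (enabled by the Clone derive above) is needed because files is now consumed twice: once by the ZipGenerator that streams the archive, and again as the contents list persisted into StoredFile, which is what makes per-file selection possible later.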
src/zip.rs

@@ -37,6 +37,12 @@ fn file_entries_size(files: &[UploadedFile]) -> u64 {
     total
 }

+pub fn file_data_offset(files: &[UploadedFile], idx: usize) -> u64 {
+    file_entries_size(&files[..idx])
+        + LOCAL_HEADER_SIZE_MINUS_FILENAME
+        + files[idx].name.len() as u64
+}
+
 fn central_directory_size(files: &[UploadedFile]) -> u64 {
     let mut total = 0;
     for file in files.iter() {
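file_data_offset is what makes single-file downloads cheap: the data for entry idx starts after the full on-disk size of every preceding entry, plus this entry's own local header and filename. baseline_offset() in download.rs adds that to any requested range offset, so the server can seek directly into the zip on disk and stream the member's bytes as-is. For idx = 1, for instance, the offset is file_entries_size of entry 0 plus LOCAL_HEADER_SIZE_MINUS_FILENAME plus the length of the second file's name. Serving raw bytes this way presumes entries are stored uncompressed, which is also what lets file_entries_size be computed from the original sizes up front.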