Compare commits
3 commits
007289ffe5 ... 62e6d64253

Author | SHA1 | Date
---|---|---
xenofem | 62e6d64253 |
xenofem | 8497b4137d |
xenofem | be4decde12 |
@@ -16,8 +16,8 @@ use inotify::{Inotify, WatchMask};
 use log::trace;
 use pin_project_lite::pin_project;
 use serde::{de, Deserialize, Deserializer};
-use time::OffsetDateTime;
 use std::{os::unix::fs::MetadataExt, time::SystemTime};
+use time::OffsetDateTime;

 use actix_web::{
     body::{self, BoxBody, SizedStream},
@@ -64,7 +64,9 @@ impl<'de> de::Visitor<'de> for SelectionVisitor {
     }

     fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
-    where E: de::Error {
+    where
+        E: de::Error,
+    {
         if v == "all" {
             Ok(DownloadSelection::All)
         } else if let Ok(n) = v.parse::<usize>() {
@@ -76,9 +78,7 @@ impl<'de> de::Visitor<'de> for SelectionVisitor {
 }

 impl<'de> Deserialize<'de> for DownloadSelection {
-    fn deserialize<D: Deserializer<'de>>(
-        de: D
-    ) -> Result<DownloadSelection, D::Error> {
+    fn deserialize<D: Deserializer<'de>>(de: D) -> Result<DownloadSelection, D::Error> {
         de.deserialize_any(SelectionVisitor)
     }
 }
@@ -122,8 +122,10 @@ impl DownloadingFile {
     }

     fn baseline_offset(&self) -> u64 {
-        if let (DownloadSelection::One(n), Some(files)) = (self.selection, self.info.contents.as_ref()) {
-            crate::zip::file_data_offset(&files, n)
+        if let (DownloadSelection::One(n), Some(files)) =
+            (self.selection, self.info.contents.as_ref())
+        {
+            crate::zip::file_data_offset(files, n)
         } else {
             0
         }
@@ -184,12 +186,7 @@ impl DownloadingFile {

             res.insert_header((
                 header::CONTENT_RANGE,
-                format!(
-                    "bytes {}-{}/{}",
-                    offset,
-                    offset + length - 1,
-                    total_size,
-                ),
+                format!("bytes {}-{}/{}", offset, offset + length - 1, total_size,),
             ));
         } else {
             res.insert_header((header::CONTENT_RANGE, format!("bytes */{}", length)));
@@ -209,7 +206,12 @@ impl DownloadingFile {
                 .map_into_boxed_body();
         }

-        let reader = new_live_reader(length, self.baseline_offset() + offset, self.file, self.storage_path);
+        let reader = new_live_reader(
+            length,
+            self.baseline_offset() + offset,
+            self.file,
+            self.storage_path,
+        );

         if offset != 0 || length != total_size {
             res.status(StatusCode::PARTIAL_CONTENT);
83  src/main.rs

@@ -8,8 +8,8 @@ use std::{fmt::Debug, path::PathBuf, str::FromStr};

 use actix_files::NamedFile;
 use actix_web::{
-    get, http::StatusCode, middleware::Logger, post, web, App, HttpRequest, HttpResponse,
-    HttpServer, Responder,
+    error::InternalError, get, http::StatusCode, middleware::Logger, post, web, App, HttpRequest,
+    HttpResponse, HttpServer, Responder,
 };
 use actix_web_actors::ws;
 use askama::Template;
@@ -51,28 +51,28 @@ pub fn log_auth_failure(ip_addr: &str) {
     warn!("Incorrect authentication attempt from {}", ip_addr);
 }

-
 #[derive(Deserialize)]
 struct DownloadRequest {
     code: String,
     download: Option<download::DownloadSelection>,
 }

-#[derive(Template)]
+#[derive(Serialize, Template)]
 #[template(path = "download.html")]
-struct DownloadInfo<'a> {
+struct DownloadInfo {
     file: StoredFile,
-    code: &'a str,
+    code: String,
     available: u64,
+    offsets: Option<Vec<u64>>,
 }

 #[get("/download")]
 async fn handle_download(
     req: HttpRequest,
-    download: web::Query<DownloadRequest>,
+    query: web::Query<DownloadRequest>,
     data: web::Data<AppState>,
 ) -> actix_web::Result<HttpResponse> {
-    let code = &download.code;
+    let code = &query.code;
     if !store::is_valid_storage_code(code) {
         return not_found(req, data, true);
     }
@@ -80,12 +80,12 @@ async fn handle_download(
     let info = if let Some(i) = info {
         i
     } else {
-        return not_found(req, data, true)
+        return not_found(req, data, true);
     };

     let storage_path = data.config.storage_dir.join(code);
     let file = File::open(&storage_path).await?;
-    if let Some(selection) = download.download {
+    if let Some(selection) = query.download {
         if let download::DownloadSelection::One(n) = selection {
             if let Some(ref files) = info.contents {
                 if n >= files.len() {
@@ -101,28 +101,66 @@ async fn handle_download(
             info,
             selection,
         }
         .into_response(&req))
     } else {
-        Ok(HttpResponse::Ok().body(DownloadInfo {
-            file: info,
-            code,
-            available: file.metadata().await?.len(),
-        }.render().unwrap()))
+        let offsets = info.contents.as_deref().map(zip::file_data_offsets);
+        Ok(HttpResponse::Ok().body(
+            DownloadInfo {
+                file: info,
+                code: code.clone(),
+                available: file.metadata().await?.len(),
+                offsets,
+            }
+            .render()
+            .unwrap(),
+        ))
     }
 }

-fn not_found(
+#[derive(Deserialize)]
+struct InfoQuery {
+    code: String,
+}
+
+#[get("/info")]
+async fn download_info(
     req: HttpRequest,
+    query: web::Query<InfoQuery>,
     data: web::Data<AppState>,
-    report: bool,
-) -> actix_web::Result<HttpResponse> {
+) -> actix_web::Result<impl Responder> {
+    let code = &query.code;
+    if !store::is_valid_storage_code(code) {
+        return not_found(req, data, true);
+    }
+    let info = data.file_store.read().await.lookup_file(code);
+    let info = if let Some(i) = info {
+        i
+    } else {
+        return not_found(req, data, true);
+    };
+
+    let storage_path = data.config.storage_dir.join(code);
+    let offsets = info.contents.as_deref().map(zip::file_data_offsets);
+    Ok(web::Json(DownloadInfo {
+        file: info,
+        code: code.clone(),
+        available: File::open(&storage_path).await?.metadata().await?.len(),
+        offsets,
+    }))
+}
+
+fn not_found<T>(req: HttpRequest, data: web::Data<AppState>, report: bool) -> actix_web::Result<T> {
     if report {
         let ip_addr = get_ip_addr(&req, data.config.reverse_proxy);
         log_auth_failure(&ip_addr);
     }
-    Ok(NamedFile::open(data.config.static_dir.join("404.html"))?
-        .set_status_code(StatusCode::NOT_FOUND)
-        .into_response(&req))
+    Err(InternalError::from_response(
+        "Download not found",
+        NamedFile::open(data.config.static_dir.join("404.html"))?
+            .set_status_code(StatusCode::NOT_FOUND)
+            .into_response(&req),
+    )
+    .into())
 }

 #[get("/upload")]
@@ -244,6 +282,7 @@ async fn main() -> std::io::Result<()> {
                 Logger::default()
             })
             .service(handle_download)
+            .service(download_info)
             .service(handle_upload)
             .service(check_upload_password)
             .service(upload_limits)
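Note on the refactor above: `not_found` is now generic over its success type, so both `handle_download` (which returns `HttpResponse`) and the new `download_info` (which returns `impl Responder`) can bail out through the same helper; instead of building an `Ok` response itself, it wraps the prepared 404 page in `actix_web::error::InternalError` and returns it as an error. The sketch below is a minimal standalone illustration of that pattern, assuming only actix-web; the route, the message, and the bodies are placeholders, not transbeam's real handlers.

// Standalone sketch of the generic not_found pattern; names and routes are assumed,
// only the actix-web API itself is real.
use actix_web::{error::InternalError, get, web, App, HttpResponse, HttpServer, Responder};

// Returns an Err that already carries the response to send, so the same helper
// works in handlers with different success types T.
fn not_found<T>() -> actix_web::Result<T> {
    Err(InternalError::from_response("not found", HttpResponse::NotFound().body("404")).into())
}

#[get("/demo/{id}")]
async fn demo(path: web::Path<u32>) -> actix_web::Result<impl Responder> {
    if *path > 100 {
        // Same early-exit shape as handle_download / download_info above.
        return not_found();
    }
    Ok(HttpResponse::Ok().body(format!("item {}", *path)))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(demo))
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
}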
@@ -3,10 +3,7 @@ use core::fmt;
 use serde::{de::Visitor, Deserializer, Serializer};
 use time::OffsetDateTime;

-pub(crate) fn serialize<S: Serializer>(
-    time: &OffsetDateTime,
-    ser: S,
-) -> Result<S::Ok, S::Error> {
+pub(crate) fn serialize<S: Serializer>(time: &OffsetDateTime, ser: S) -> Result<S::Ok, S::Error> {
     ser.serialize_i64(time.unix_timestamp())
 }

@@ -28,9 +25,7 @@ impl<'de> Visitor<'de> for I64Visitor {
     }
 }

-pub(crate) fn deserialize<'de, D: Deserializer<'de>>(
-    de: D,
-) -> Result<OffsetDateTime, D::Error> {
+pub(crate) fn deserialize<'de, D: Deserializer<'de>>(de: D) -> Result<OffsetDateTime, D::Error> {
     Ok(
         OffsetDateTime::from_unix_timestamp(de.deserialize_i64(I64Visitor)?)
             .unwrap_or_else(|_| OffsetDateTime::now_utc()),
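These two free functions follow serde's `with`-module convention (module-level `serialize` and `deserialize` with exactly these signatures), storing timestamps as plain i64 unix seconds. Below is a rough sketch of how such a module is typically attached to a field; the `timestamp` module name, the `StoredFileSketch` struct, and its fields are hypothetical, the inner deserializer is simplified to `i64::deserialize` instead of the `I64Visitor` shown above, and the round trip in `main` assumes `serde_json` is available.

// Hypothetical wiring for a serde `with`-module; not transbeam's real types.
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;

mod timestamp {
    use serde::{Deserialize, Deserializer, Serializer};
    use time::OffsetDateTime;

    pub fn serialize<S: Serializer>(time: &OffsetDateTime, ser: S) -> Result<S::Ok, S::Error> {
        ser.serialize_i64(time.unix_timestamp())
    }

    pub fn deserialize<'de, D: Deserializer<'de>>(de: D) -> Result<OffsetDateTime, D::Error> {
        // Simplified: uses i64::deserialize rather than a custom visitor.
        let secs = i64::deserialize(de)?;
        Ok(OffsetDateTime::from_unix_timestamp(secs).unwrap_or_else(|_| OffsetDateTime::now_utc()))
    }
}

#[derive(Serialize, Deserialize)]
struct StoredFileSketch {
    name: String,
    #[serde(with = "timestamp")]
    expiry: OffsetDateTime,
}

fn main() {
    let f = StoredFileSketch {
        name: "demo.zip".into(),
        expiry: OffsetDateTime::now_utc(),
    };
    // Serializes the expiry as an integer, e.g. {"name":"demo.zip","expiry":1700000000}.
    let json = serde_json::to_string(&f).unwrap();
    let back: StoredFileSketch = serde_json::from_str(&json).unwrap();
    assert_eq!(back.name, f.name);
}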
11  src/zip.rs

@@ -43,6 +43,17 @@ pub fn file_data_offset(files: &[UploadedFile], idx: usize) -> u64 {
         + files[idx].name.len() as u64
 }

+pub fn file_data_offsets(files: &[UploadedFile]) -> Vec<u64> {
+    let mut offsets = Vec::new();
+    let mut offset: u64 = 0;
+    for file in files.iter() {
+        offset += LOCAL_HEADER_SIZE_MINUS_FILENAME + file.name.len() as u64;
+        offsets.push(offset);
+        offset += file.size + DATA_DESCRIPTOR_SIZE;
+    }
+    offsets
+}
+
 fn central_directory_size(files: &[UploadedFile]) -> u64 {
     let mut total = 0;
     for file in files.iter() {
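The new `file_data_offsets` helper precomputes, for each entry of the streamed ZIP, the byte offset at which that entry's data begins: a fixed local-header size plus the filename before the data, then the data itself plus a data descriptor after it. The template and the new polling script compare these offsets against `available` (how many bytes of the archive exist on disk so far) to decide which entries are already downloadable. The sketch below is a self-contained illustration of that comparison; `FileEntry` and the two constants are placeholders, not the definitions in `src/zip.rs`.

// Self-contained illustration only: FileEntry and the two constants stand in for
// UploadedFile and the real header/descriptor sizes defined in src/zip.rs.
const LOCAL_HEADER_SIZE_MINUS_FILENAME: u64 = 50; // assumed per-entry header overhead
const DATA_DESCRIPTOR_SIZE: u64 = 24; // assumed per-entry trailer overhead

struct FileEntry {
    name: String,
    size: u64,
}

// Same shape as the new file_data_offsets: advance past header + name, record
// where the entry's data starts, then skip the data and its descriptor.
fn file_data_offsets(files: &[FileEntry]) -> Vec<u64> {
    let mut offsets = Vec::new();
    let mut offset: u64 = 0;
    for file in files {
        offset += LOCAL_HEADER_SIZE_MINUS_FILENAME + file.name.len() as u64;
        offsets.push(offset);
        offset += file.size + DATA_DESCRIPTOR_SIZE;
    }
    offsets
}

fn main() {
    let files = vec![
        FileEntry { name: "a.txt".into(), size: 1_000 },
        FileEntry { name: "b.txt".into(), size: 5_000 },
    ];
    let available = 1_200; // bytes of the archive written so far
    for (file, offset) in files.iter().zip(file_data_offsets(&files)) {
        // Mirrors the `offset > available` checks in the template and download.js.
        let state = if offset > available { "unavailable" } else { "ready" };
        println!("{}: {}", file.name, state);
    }
}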
@@ -4,6 +4,7 @@ body {
     max-width: 512px;
     margin: 0.5em auto;
     padding: 0 1em;
+    overflow-wrap: break-word;
 }

 #header h1 {
14  static/js/download.js (new file)

@@ -0,0 +1,14 @@
+document.addEventListener("DOMContentLoaded", () => {
+    const table = document.getElementById("download_contents").getElementsByTagName("tbody")[0];
+    if (table.children.length === 0) { return; }
+
+    setInterval(() => {
+        fetch(`info?code=${CODE}`)
+            .then((res) => res.json())
+            .then((info) => {
+                for (const [index, offset] of info.offsets.entries()) {
+                    table.children[index].className = (offset > info.available) ? "unavailable" : "";
+                }
+            });
+    }, 5000);
+});
@@ -4,10 +4,12 @@
     <meta charset="utf-8"/>
     <meta name="viewport" content="width=device-width, initial-scale=1"/>
     <link rel="stylesheet" type="text/css" href="css/transbeam.css"/>
-    <link rel="stylesheet" type="text/css" href="css/download.css"/>
     <link rel="apple-touch-icon" href="images/site-icons/transbeam-apple.png"/>
     <link rel="manifest" href="manifest.json"/>
     <script src="js/util.js"></script>
+    <script type="text/javascript">
+      const CODE = "{{ code }}";
+    </script>
     <script src="js/download.js"></script>
     <title>{{ file.name }} - transbeam</title>
   </head>
@@ -25,16 +27,17 @@
       {% when Some with (files) %}
       <div id="download_contents" class="section">
         <h3>Contents</h3>
-        <table>
+        <table><tbody>
+          {% let offsets = offsets.as_ref().unwrap() %}
           {% for f in files %}
-          <tr class="{% if zip::file_data_offset(files.as_ref(), loop.index0.clone()) > available %}unavailable{% endif %}">
+          <tr class="{% if offsets.get(loop.index0.clone()).unwrap().clone() > available %}unavailable{% endif %}">
             <td class="file_size">{{ bytesize::to_string(f.size.clone(), false).replace(" ", "") }}</td>
             <td class="file_name">{{ f.name }}</td>
             <td class="file_download"><a class="download_button" href="download?code={{ code }}&download={{ loop.index0 }}"></a></td>
             <td class="file_unavailable"></td>
           </tr>
           {% endfor %}
-        </table>
+        </tbody></table>
       </div>
       {% else %}
     {% endmatch %}