#!/usr/bin/env python3

import argparse
import asyncio
import getpass
import json
import math
import pathlib
import sys
import time
from unicodedata import east_asian_width

import aiofiles
from tqdm import tqdm
from websockets import connect

# Bytes read from disk (and sent over the websocket) per chunk.
FILE_CHUNK_SIZE = 16384

# Width, in terminal columns, of the scrolling file-name segment.
FILE_NAME_DISPLAY_WIDTH = 24
# Blank columns between repetitions of the scrolling name.
FILE_NAME_DISPLAY_PADDING = 3
# Minimum seconds between scroll-position advances.
FILE_NAME_DISPLAY_UPDATE_PERIOD = 0.2


def rotating_segment(name, pos):
    """Return a FILE_NAME_DISPLAY_WIDTH-column window into a scrolling name.

    The name is repeated with FILE_NAME_DISPLAY_PADDING spaces between the
    two copies, and the window starts at character offset ``pos``.
    East-Asian full-width characters count as two columns; when one would
    straddle the right edge of the window, a single space is emitted in its
    place so the segment is always exactly FILE_NAME_DISPLAY_WIDTH columns.
    """
    output = ''
    total_width = 0
    rotating_name = name + (' ' * FILE_NAME_DISPLAY_PADDING) + name
    for char in rotating_name[pos:]:
        char_width = 2 if east_asian_width(char) in ('F', 'W') else 1
        if char_width == 2 and total_width == FILE_NAME_DISPLAY_WIDTH - 1:
            # A full-width char would overflow the final column; pad instead.
            output += ' '
            total_width += 1
        else:
            output += char
            total_width += char_width
        if total_width >= FILE_NAME_DISPLAY_WIDTH:
            return output
    # Fallback for names shorter than the display width.  Callers currently
    # guard against this, but returning the partial segment beats returning
    # None (the original fell off the end of the function here).
    return output
async def file_loader(files):
    """Asynchronously yield the contents of each file in chunks.

    ``files`` is a sequence of ``(path, size)`` pairs.  Two tqdm bars are
    shown: an overall total (position 1) and a per-file bar (position 0).
    File names wider than FILE_NAME_DISPLAY_WIDTH scroll inside the
    per-file bar, advancing at most once per
    FILE_NAME_DISPLAY_UPDATE_PERIOD seconds.

    Exits the process if a file yields EOF before its recorded size has
    been read (e.g. it was truncated after being stat'ed).
    """
    with tqdm(desc="Total", total=sum(size for (path, size) in files), unit='B', unit_scale=True, leave=True, position=1) as total_progress:
        for (path, size) in files:
            if len(path.name) > FILE_NAME_DISPLAY_WIDTH:
                # Long name: scroll it, starting from offset 0.
                file_name_display_pos = 0
                last_desc_update_time = time.time()
                desc = rotating_segment(path.name, 0)
            else:
                desc = path.name
            with tqdm(desc=desc, total=size, unit='B', unit_scale=True, leave=True, position=0) as file_progress:
                async with aiofiles.open(path, mode='rb') as f:
                    while True:
                        pos = await f.tell()
                        if pos >= size:
                            break
                        # Never read past the size recorded in the manifest,
                        # even if the file has grown since it was stat'ed.
                        data = await f.read(min(FILE_CHUNK_SIZE, size - pos))
                        if data == b'':
                            # EOF before the expected size: file shrank
                            # underneath us; bail out rather than send a
                            # short file.  (sys.exit, not the site-module
                            # exit() convenience.)
                            tqdm.write("file ended early!")
                            sys.exit(1)
                        if len(path.name) > FILE_NAME_DISPLAY_WIDTH and (now := time.time()) - last_desc_update_time > FILE_NAME_DISPLAY_UPDATE_PERIOD:
                            # Advance the scroll by one character, wrapping
                            # at name length + padding.
                            file_name_display_pos = (file_name_display_pos + 1) % (len(path.name) + FILE_NAME_DISPLAY_PADDING)
                            last_desc_update_time = now
                            file_progress.set_description(
                                desc=rotating_segment(path.name, file_name_display_pos),
                                refresh=False,
                            )
                        total_progress.update(len(data))
                        file_progress.update(len(data))
                        yield data
async def send(paths, host, password, lifetime, collection_name=None):
    """Upload ``paths`` to a transbeam server over a websocket.

    Sends a JSON manifest (file names/sizes/mtimes, lifetime in days, the
    password, and optionally a collection name), waits for the server's
    "ready" response, prints the download URL, then streams the file
    contents.  Exits the process on an unexpected server response.
    Non-regular files are silently skipped.
    """
    paths = [path for path in paths if path.is_file()]
    file_metadata = []
    for path in paths:
        # stat() exactly once per file so size and mtime are consistent
        # (and the size sent in the manifest matches what file_loader reads).
        stat = path.stat()
        file_metadata.append({
            "name": path.name,
            "size": stat.st_size,
            # Server expects milliseconds since the epoch.
            "modtime": math.floor(stat.st_mtime * 1000),
        })
    manifest = {
        "files": file_metadata,
        "lifetime": lifetime,
        "password": password,
    }
    if collection_name is not None:
        manifest["collection_name"] = collection_name

    async with connect("wss://{}/upload".format(host)) as ws:
        await ws.send(json.dumps(manifest))
        resp = json.loads(await ws.recv())
        if resp["type"] != "ready":
            print("unexpected response: {}".format(resp))
            sys.exit(1)
        print("Download: https://{}/download?code={}".format(host, resp["code"]))
        loader = file_loader([(path, meta["size"]) for path, meta in zip(paths, file_metadata)])
        async for data in loader:
            await ws.send(data)
# Command-line interface for the uploader.
parser = argparse.ArgumentParser(description="Upload files to transbeam")
parser.add_argument(
    "-l", "--lifetime",
    type=int,
    default=7,
    help="Lifetime in days for files (default 7)",
)
parser.add_argument(
    "-H", "--host",
    type=str,
    default="transbeam.link",
    help="transbeam host (default transbeam.link)",
)
parser.add_argument(
    "-n", "--collection-name",
    type=str,
    help="Name for a collection of multiple files",
)
parser.add_argument(
    "files",
    type=pathlib.Path,
    nargs="+",
    help="Files to upload",
)
async def main():
    """Parse CLI arguments, prompt for the password, and run the upload."""
    args = parser.parse_args()
    if len(args.files) == 1 and args.collection_name is not None:
        print("--collection-name is only applicable when multiple files are being uploaded")
        sys.exit(1)
    # Prompt interactively so the password never appears in shell history
    # or the process argument list.
    password = getpass.getpass()
    await send(args.files, args.host, password, args.lifetime, args.collection_name)
if __name__ == "__main__":
    # Only start the uploader when executed as a script, not on import.
    asyncio.run(main())