wooo pretty colors

xenofem 2024-04-14 22:55:01 -04:00
parent 6dffbcd320
commit dd6f3ff02c


@@ -107,10 +107,37 @@ SUGGESTED_WORKS_COUNT = 10
 READONLY_FILE = stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
 READONLY_DIR = READONLY_FILE | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
+ANSI_RED = '\x1b[1;31m'
+ANSI_GREEN = '\x1b[1;32m'
+ANSI_YELLOW = '\x1b[1;33m'
+ANSI_GRAY = '\x1b[1;90m'
+ANSI_NORMAL = '\x1b[0m'
+ANSI_LINECLEAR = '\x1b[2K\r'
 debug_mode = False
 def debug(s):
     if debug_mode:
-        print(f'{time.strftime("%Y-%m-%d %H:%M:%S")} - {s}')
+        print(f'{ANSI_GRAY}{time.strftime("%Y-%m-%d %H:%M:%S")} - {s}{ANSI_NORMAL}')
+def succ(s):
+    print(f'{ANSI_GREEN}{s}{ANSI_NORMAL}')
+def warn(s):
+    print(f'{ANSI_YELLOW}{s}{ANSI_NORMAL}')
+def err(s):
+    print(f'{ANSI_RED}{s}{ANSI_NORMAL}')
+def count_progress(idx, count, thing):
+    if idx + 1 < count:
+        pref = ''
+        suf = '...'
+        end = ''
+    else:
+        pref = ANSI_GREEN
+        suf = ANSI_NORMAL
+        end = '\n'
+    print(f'{ANSI_LINECLEAR}{pref}{idx+1}/{count} {thing}{suf}', end=end)
 def open_zipfile_with_encoding(path):
     for enc in ["utf-8", "shift-jis", "shift-jisx0213"]:
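(Not part of the commit: a quick usage sketch of the new helpers, assuming a terminal that understands ANSI SGR escape codes and that the functions above are in scope; the file list is made up.)

items = ['a.png', 'b.png', 'c.png']
for idx, item in enumerate(items):
    # ... per-item work would go here ...
    count_progress(idx, len(items), 'files copied')  # redraws one status line; the final count prints in green
succ('all files copied')      # bold green
warn('missing a thumbnail')   # bold yellow
err('unreadable archive')     # bold red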
@@ -119,7 +146,7 @@ def open_zipfile_with_encoding(path):
         except UnicodeDecodeError:
             pass
-    print(f'{path} contains filenames with unknown character encoding!')
+    err(f'{path} contains filenames with unknown character encoding!')
     exit(1)
 def open_rarfile_with_encoding(path):
@@ -128,7 +155,7 @@ def open_rarfile_with_encoding(path):
         if all('�' not in info.filename for info in rf.infolist()):
             return rf
-    print(f'{path} contains filenames with unknown character encoding!')
+    err(f'{path} contains filenames with unknown character encoding!')
     exit(1)
 def readonly(path):
@@ -408,7 +435,7 @@ def collate(args):
             break
         if collation_result and collator.index > 0:
-            print(f'Collated {collator.index} pages for {work_id}')
+            succ(f'Collated {collator.index} pages for {work_id}')
             work_staging_dir.rename(work_collation_dir)
         else:
             if work_staging_dir.is_dir():
@@ -503,20 +530,20 @@ class Collator:
     def link_pdf(self, src):
         with fitz.open(src) as pdf:
-            images = pdf_images(pdf, self.args.pdf_strategy)
-            if images is None:
+            image_extractors = pdf_image_extractors(pdf, self.args.pdf_strategy)
+            if image_extractors is None:
                 print(f'Failed to enumerate page images in PDF {src}')
                 return None
             self.dest.mkdir(parents=True, exist_ok=True)
             print(f'0 pages collated...', end='')
-            for (idx, image) in enumerate(images, start=self.index):
+            for (idx, extractor) in enumerate(image_extractors, start=self.index):
+                image = extractor()
                 file_path = self.dest / f'{idx:04d}.{image["ext"]}'
                 with open(file_path, 'wb') as f:
                     f.write(image["image"])
-                print(f'\x1b[2K\r{idx+1-self.index} pages collated...', end='')
-            print()
+                count_progress(idx, len(image_extractors), 'pages collated')
             self.index += pdf.page_count
             return True
@@ -725,7 +752,7 @@ def display_sixel_pixmap(pixmap_bytes):
     finally:
         sixel_output_unref(output)
-def pdf_images(pdf, strategy):
+def pdf_image_extractors(pdf, strategy):
     print(f'0/{pdf.page_count} pages analyzed...', end='')
     image_extractors = []
     for (idx, page) in enumerate(pdf):
@@ -762,9 +789,9 @@ def pdf_images(pdf, strategy):
                 display_sixel_pixmap(pixmap.tobytes('png'))
                 choice = input(f'[N]ope out / [c]onvert page{"" if xref is None else " / e[x]tract image"} / [d]rop page / [s]how page? [n/c{"" if xref is None else "/x"}/d/s] ')
-        print(f'\x1b[2K\r{idx+1}/{pdf.page_count} pages analyzed...', end=('' if idx+1 < pdf.page_count else '\n'))
+        count_progress(idx, pdf.page_count, 'pages analyzed')
-    return (extractor() for extractor in image_extractors)
+    return image_extractors
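(Not part of the commit: a minimal sketch of the callable-extractor pattern used above, with a hypothetical render() standing in for PyMuPDF. Returning a list of zero-argument callables instead of a generator of finished images lets the caller take len() for progress totals while each page is still extracted lazily, one at a time.)

def render(page):
    return b'...'  # stand-in for the real per-page image bytes

def make_extractors(pages):
    # one zero-argument callable per page; nothing is rendered yet
    return [lambda page=page: {'ext': 'png', 'image': render(page)} for page in pages]

extractors = make_extractors(['p1', 'p2', 'p3'])
print(len(extractors))   # total known up front, so progress can report i/N
first = extractors[0]()  # extraction happens only when a callable is invoked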
 def nfc(s):
     return unicodedata.normalize('NFC', s)
@@ -929,7 +956,7 @@ def standalone_image_size(filepath):
         with Image.open(filepath) as im:
             return im.size
     except UnidentifiedImageError:
-        print(f'Warning: PIL failed to load image {filepath}! Retrying with less strict settings')
+        warn(f'PIL failed to load image {filepath}! Retrying with less strict settings')
         PIL.ImageFile.LOAD_TRUNCATED_IMAGES = True
         try:
             with Image.open(filepath) as im:
@@ -1171,7 +1198,7 @@ def generate(args):
         }
         works.append(work)
-        print(f'\x1b[2K\r{idx+1} database entries read...', end='')
+        print(f'{ANSI_LINECLEAR}{idx+1} database entries read...', end='')
     print()
     for (idx, work) in enumerate(works):
@@ -1191,7 +1218,7 @@ def generate(args):
         with open(viewer_dir / 'index.html', 'w') as f:
             f.write(viewer_template.render(depth=3, work=work, title=work['title']))
-        print(f'\x1b[2K\r{idx+1}/{len(works)} works processed...', end=('' if idx+1 < len(works) else '\n'))
+        count_progress(idx, len(works), 'works processed')
     cache_con.commit()
     uca = pyuca.Collator().sort_key
@@ -1214,7 +1241,7 @@ def generate(args):
                     title=cat,
                     categorization=categorization,
                 ))
-            print(f'\x1b[2K\r{idx+1}/{len(cats)} {categorization} processed...', end=('' if idx+1 < len(cats) else '\n'))
+            count_progress(idx, len(cats), f'{categorization} processed')
         categorization_dir.mkdir(parents=True, exist_ok=True)
         with open(categorization_dir / 'index.html', 'w') as f: