Compare commits

...

3 Commits

Author SHA1 Message Date
xenofem 437cbcd73f add omake to epilogue regex 2024-02-08 05:29:44 -05:00
xenofem 83c5e128d6 recognize back covers as well as front covers 2024-02-08 05:11:17 -05:00
xenofem f5f338f386 add short aliases for subcommands 2024-02-08 05:03:40 -05:00
1 changed file with 14 additions and 10 deletions

View File

@ -27,9 +27,10 @@ FANZA_ID_REGEX = re.compile('^d_[0-9]+$')
FAKKU_ID_REGEX = re.compile('.*_FAKKU$')
TEXTLESS_REGEX = re.compile('(台詞|セリフ|せりふ|テキスト|文字)((な|無)し|抜き)|notext|textless', re.IGNORECASE)
EPILOGUE_REGEX = re.compile('after|後日談', re.IGNORECASE)
EPILOGUE_REGEX = re.compile('after|後日談|おまけ', re.IGNORECASE)
HI_RES_REGEX = re.compile('高解像度', re.IGNORECASE)
COVER_REGEX = re.compile('表紙|cover|hyoushi', re.IGNORECASE)
FRONT_COVER_REGEX = re.compile('(^|[^裏])表紙|cover|hyoushi', re.IGNORECASE)
BACK_COVER_REGEX = re.compile('裏表紙', re.IGNORECASE)
ALT_VERSIONS = [
'褐色',
'日焼け',
@ -525,14 +526,14 @@ def collate_from_paths(srcs, dest, start_index, exclude):
if textless_split != False:
return textless_split
cover_split = try_collate_split_regex(srcs, dest, start_index, exclude, earlier=FRONT_COVER_REGEX, later=BACK_COVER_REGEX)
if cover_split != False:
return cover_split
epilogue_split = try_collate_split_regex(srcs, dest, start_index, exclude, later=EPILOGUE_REGEX)
if epilogue_split != False:
return epilogue_split
cover_split = try_collate_split_regex(srcs, dest, start_index, exclude, earlier=COVER_REGEX)
if cover_split != False:
return cover_split
if all(src.is_file() and is_image(src) for src in srcs):
ordering = complete_prefix_number_ordering(srcs)
if ordering:
@ -854,7 +855,7 @@ argparser.add_argument(
)
subparsers = argparser.add_subparsers(title="subcommands", required=True)
parser_extract = subparsers.add_parser('extract', help='extract zipfiles')
parser_extract = subparsers.add_parser('extract', aliases=['x', 'ex'], help='extract zipfiles')
parser_extract.add_argument(
'-r', '--remove',
action='store_true',
@ -869,7 +870,7 @@ parser_extract.add_argument(
)
parser_extract.set_defaults(func=extract)
parser_fetch = subparsers.add_parser('fetch', help='fetch metadata and thumbnails')
parser_fetch = subparsers.add_parser('fetch', aliases=['f', 'fet'], help='fetch metadata and thumbnails')
parser_fetch.add_argument(
'-l', '--locale',
type=str,
@ -882,6 +883,7 @@ parser_fetch.set_defaults(func=fetch)
parser_collate = subparsers.add_parser(
'collate',
aliases=['c', 'co', 'col'],
help='collate each work into a sequence of image files',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent("""\
@ -928,6 +930,7 @@ parser_collate.set_defaults(func=collate)
parser_manual_collate = subparsers.add_parser(
'manual-collate',
aliases=['mc', 'man', 'manual'],
help='collate a single work manually',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent("""\
@ -970,11 +973,11 @@ parser_manual_collate.add_argument(
)
parser_manual_collate.set_defaults(func=manual_collate)
parser_analyze = subparsers.add_parser('analyze', help='analyze an extracted folder to assist in collation')
parser_analyze = subparsers.add_parser('analyze', aliases=['a', 'an', 'anal'], help='analyze an extracted folder to assist in collation')
parser_analyze.add_argument('work_id')
parser_analyze.set_defaults(func=analyze)
parser_metadata = subparsers.add_parser('metadata', help='view or modify metadata for a work')
parser_metadata = subparsers.add_parser('metadata', aliases=['m', 'me', 'meta'], help='view or modify metadata for a work')
parser_metadata.add_argument('work_id')
parser_metadata.add_argument(
'--virtual',
@ -985,6 +988,7 @@ parser_metadata.set_defaults(func=metadata)
parser_generate = subparsers.add_parser(
'generate',
aliases=['g', 'gen'],
help='generate HTML/CSS/JS for library site',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent("""\