2020-04-18 11:45:44 +00:00
|
|
|
# SPDX-License-Identifier: MIT
|
2023-10-29 18:05:20 +00:00
|
|
|
# SPDX-FileCopyrightText: © 2004 Tristan Seligmann and Jonathan Jacobs
|
|
|
|
# SPDX-FileCopyrightText: © 2012 Bastian Kleineidam
|
|
|
|
# SPDX-FileCopyrightText: © 2015 Tobias Gruetzmacher
|
2023-11-19 21:15:24 +00:00
|
|
|
# PYTHON_ARGCOMPLETE_OK
|
|
|
|
from __future__ import annotations
|
|
|
|
|
2016-05-16 12:57:47 +00:00
|
|
|
import argparse
|
2023-11-19 21:15:24 +00:00
|
|
|
import contextlib
|
|
|
|
import importlib
|
2020-02-03 00:03:31 +00:00
|
|
|
import os
|
2020-12-24 13:05:48 +00:00
|
|
|
import platform
|
2023-11-19 21:15:24 +00:00
|
|
|
from collections.abc import Iterable
|
2020-10-04 21:24:05 +00:00
|
|
|
|
2022-05-29 23:04:10 +00:00
|
|
|
from platformdirs import PlatformDirs
|
2016-05-16 12:57:47 +00:00
|
|
|
|
2020-10-01 16:49:14 +00:00
|
|
|
from . import events, configuration, singleton, director
|
2019-06-19 05:12:43 +00:00
|
|
|
from . import AppName, __version__
|
2016-05-16 12:57:47 +00:00
|
|
|
from .output import out
|
2022-06-04 08:56:25 +00:00
|
|
|
from .scraper import scrapers as scrapercache
|
2016-05-16 12:57:47 +00:00
|
|
|
from .util import internal_error, strlimit
|
|
|
|
|
|
|
|
|
|
|
|
class ArgumentParser(argparse.ArgumentParser):
    """Argument parser that pages its help output on interactive terminals."""

    def print_help(self, file=None) -> None:
        """Paginate help message on TTYs."""
        # Render the help text first, then page it through the output object.
        helptext = self.format_help()
        with out.pager():
            out.info(helptext)
|
|
|
|
|
2017-05-14 22:54:02 +00:00
|
|
|
|
2022-05-29 23:04:10 +00:00
|
|
|
# Making our config roaming seems sensible
platformdirs = PlatformDirs(appname=AppName, appauthor=False, roaming=True, opinion=True)
# Per-user directory where additional comic modules ("plugins") can be
# dropped; scanned by scraper_completion() and run() via scrapercache.adddir().
user_plugin_path = platformdirs.user_data_path / 'plugins'
|
|
|
|
|
|
|
|
|
|
|
|
ExtraHelp = f"""\
|
2016-05-16 12:57:47 +00:00
|
|
|
EXAMPLES
|
|
|
|
List available comics:
|
|
|
|
dosage -l
|
|
|
|
|
|
|
|
Get the latest comic of for example CalvinAndHobbes and save it in the "Comics"
|
|
|
|
directory:
|
|
|
|
dosage CalvinAndHobbes
|
|
|
|
|
|
|
|
If you already have downloaded several comics and want to get the latest
|
|
|
|
strips of all of them:
|
|
|
|
dosage --continue @
|
|
|
|
|
2022-05-29 23:04:10 +00:00
|
|
|
User plugin directory: {user_plugin_path}
|
|
|
|
"""
|
2020-10-04 21:24:05 +00:00
|
|
|
|
|
|
|
|
2023-11-19 21:15:24 +00:00
|
|
|
def setup_options() -> ArgumentParser:
    """Construct option parser.

    @return: new option parser
    @rtype argparse.ArgumentParser
    """
    parser = ArgumentParser(
        description="A comic downloader and archiver.",
        epilog=ExtraHelp,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument('-v', '--verbose', action='count', default=0,
        help='provides verbose output, use multiple times for more verbosity')
    parser.add_argument('-n', '--numstrips', action='store', type=int, default=0,
        help='traverse and retrieve the given number of comic strips;'
        ' use --all to retrieve all comic strips')
    parser.add_argument('-a', '--all', action='store_true',
        help='traverse and retrieve all comic strips')
    parser.add_argument('-c', '--continue', action='store_true', dest='cont',
        help='traverse and retrieve comic strips until an existing one is found')
    # Keep a reference so argcomplete can attach a directory completer below.
    basepath_opt = parser.add_argument('-b', '--basepath', action='store',
        default='Comics', metavar='PATH',
        # typo fix: "invidivual" -> "individual"
        help='set the path to create individual comic directories in, default is Comics')
    parser.add_argument('--baseurl', action='store', metavar='PATH',
        help='the base URL of your comics directory (for RSS, HTML, etc.);'
        # fixed option reference: the option is spelled --basepath, not --base-path
        ' this should correspond to --basepath')
    parser.add_argument('-l', '--list', action='store_true',
        help='list available comic modules')
    parser.add_argument('--singlelist', action='store_true',
        help='list available comic modules in a single column list')
    parser.add_argument('--version', action='store_true',
        help='display the version number')
    parser.add_argument('--vote', action='store_true',
        help='vote for the selected comics')
    parser.add_argument('-m', '--modulehelp', action='store_true',
        help='display help for comic modules')
    parser.add_argument('-t', '--timestamps', action='store_true',
        help='print timestamps for all output at any info level')
    parser.add_argument('-o', '--output', action='append', dest='handler',
        choices=events.getHandlerNames(),
        help='sets output handlers for downloaded comics')
    parser.add_argument('--no-downscale', action='store_false',
        dest='allowdownscale',
        help='prevent downscaling when using html or rss handler')
    parser.add_argument('-p', '--parallel', action='store', type=int, default=1,
        help='fetch comics in parallel. Specify the number of connections')
    parser.add_argument('--adult', action='store_true',
        help='confirms that you are old enough to view adult content')
    parser.add_argument('--allow-multiple', action='store_true',
        help='allows multiple instances to run at the same time.'
        ' Use if you know what you are doing.')
    # used for development testing prev/next matching
    parser.add_argument('--dry-run', action='store_true',
        help=argparse.SUPPRESS)
    # List all comic modules, even those normally suppressed, because they
    # are not "real" (moved & removed)
    parser.add_argument('--list-all', action='store_true',
        help=argparse.SUPPRESS)
    comic_arg = parser.add_argument('comic', nargs='*',
        help='comic module name (including case insensitive substrings)')
    comic_arg.completer = scraper_completion
    # Shell completion is optional: argcomplete is not a hard dependency,
    # so silently skip this when it is not installed.
    with contextlib.suppress(ImportError):
        completers = importlib.import_module('argcomplete.completers')
        basepath_opt.completer = completers.DirectoriesCompleter()
        importlib.import_module('argcomplete').autocomplete(parser)
    return parser
|
|
|
|
|
|
|
|
|
2023-11-19 21:15:24 +00:00
|
|
|
def scraper_completion(**kwargs) -> Iterable[str]:
    """Completion helper for argcomplete: supply all known comic module names."""
    # Include user-provided plugin modules in the completion set.
    scrapercache.adddir(user_plugin_path)
    available = scrapercache.all()
    return (module.name for module in available)
|
|
|
|
|
|
|
|
|
2016-05-16 12:57:47 +00:00
|
|
|
def display_version(verbose):
    """Display application name, version, copyright and license.

    With verbose set, additionally check online for a newer release.
    Always returns 0 (success exit code).
    """
    print(configuration.App)
    print("Using Python {} ({}) on {}".format(platform.python_version(),
        platform.python_implementation(), platform.platform()))
    print(configuration.Copyright)
    print(configuration.Freeware)
    print("For support see", configuration.SupportUrl)
    if verbose:
        # search for updates; imported lazily so normal runs don't pay for it
        from .updater import check_update
        try:
            value = check_update()
            if value:
                version, url = value
                if url is None:
                    # current version is newer than online version
                    text = ('Detected local or development version %(currentversion)s. '
                            'Available version of %(app)s is %(version)s.')
                else:
                    # display update link
                    text = ('A new version %(version)s of %(app)s is '
                            'available at %(url)s.')
                attrs = {'version': version, 'app': AppName,
                    'url': url, 'currentversion': __version__}
                print(text % attrs)
        except (IOError, KeyError) as err:
            # Typo fixed in user-facing message: "occured" -> "occurred".
            print(f'An error occurred while checking for an update of {AppName}: {err!r}')
    return 0
|
|
|
|
|
|
|
|
|
|
|
|
def set_output_info(options):
    """Configure the global output object from parsed command line options."""
    out.timestamps = options.timestamps
    # Reset the level first, then raise it by the verbosity count.
    out.level = 0
    out.level += options.verbose
|
|
|
|
|
|
|
|
|
|
|
|
def display_help(options):
    """Show per-module help for the comics selected on the command line.

    Returns the number of modules whose help could not be shown, or 2 when
    the selection itself was invalid.
    """
    try:
        selection = director.getScrapers(options.comic, options.basepath, listing=True)
        return sum(display_comic_help(module) for module in selection)
    except ValueError as msg:
        out.exception(msg)
        return 2
|
|
|
|
|
|
|
|
|
|
|
|
def display_comic_help(scraperobj):
    """Print the help text of a single comic module.

    Returns 0 on success, 1 when the module raised a ValueError.
    """
    # Temporarily tag all output lines with the comic's name.
    saved_context = out.context
    out.context = scraperobj.name
    try:
        out.info(f'URL: {scraperobj.url}')
        out.info(f'Language: {scraperobj.language()}')
        if scraperobj.adult:
            out.info("Adult comic, use option --adult to fetch.")
        reasons = scraperobj.getDisabledReasons()
        if reasons:
            out.info("Disabled: " + " ".join(reasons.values()))
        if scraperobj.help:
            for helpline in scraperobj.help.splitlines():
                out.info(helpline)
        return 0
    except ValueError as msg:
        out.exception(msg)
        return 1
    finally:
        out.context = saved_context
|
|
|
|
|
|
|
|
|
|
|
|
def vote_comics(options):
    """Cast a vote for every comic selected on the command line.

    Returns the number of failed votes (plus one for an invalid selection).
    """
    failures = 0
    try:
        selection = director.getScrapers(options.comic, options.basepath,
            options.adult)
        for module in selection:
            failures += vote_comic(module)
    except ValueError as msg:
        out.exception(msg)
        failures += 1
    return failures
|
|
|
|
|
|
|
|
|
|
|
|
def vote_comic(scraperobj):
    """Submit a vote for one comic scraper.

    Returns 0 on success, 1 on any failure.
    """
    result = 0
    # Tag output with the comic's name while voting.
    saved_context = out.context
    out.context = scraperobj.name
    try:
        scraperobj.vote()
        out.info(u'Vote submitted.')
    except Exception as msg:
        # Voting is best-effort: report the error but keep going.
        out.exception(msg)
        result = 1
    finally:
        out.context = saved_context
    return result
|
|
|
|
|
|
|
|
|
|
|
|
def run(options):
    """Execute comic commands."""
    set_output_info(options)
    # Pick up user-provided plugin modules before any command runs.
    scrapercache.adddir(user_plugin_path)
    # Refuse to start a second instance unless explicitly allowed.
    if not options.allow_multiple:
        singleton.SingleInstance()
    # Informational commands that need no comic selection:
    if options.version:
        return display_version(options.verbose)
    if options.list:
        return do_list()
    wants_single_column = options.singlelist or options.list_all
    if wants_single_column:
        return do_list(column_list=False, verbose=options.verbose,
                       listall=options.list_all)
    # Everything below operates on an explicit comic selection.
    if not options.comic:
        out.warn(u'No comics specified, bailing out!')
        return 1
    if options.modulehelp:
        return display_help(options)
    if options.vote:
        return vote_comics(options)
    return director.getComics(options)
|
|
|
|
|
|
|
|
|
2016-06-05 19:47:58 +00:00
|
|
|
def do_list(column_list=True, verbose=False, listall=False):
    """List available comics, paged, in one or multiple columns."""
    with out.pager():
        out.info('Available comic scrapers:')
        out.info(f'Comics tagged with [{TAG_ADULT}] require age confirmation'
            ' with the --adult option.')
        out.info(f'Non-english comics are tagged with [{TAG_LANG}].')
        all_scrapers = sorted(scrapercache.all(listall),
            key=lambda module: module.name.lower())
        if column_list:
            count, disabled = do_column_list(all_scrapers)
        else:
            count, disabled = do_single_list(all_scrapers, verbose=verbose)
        out.info(f'{count} supported comics.')
        if disabled:
            out.info('')
            out.info(f'Some comics are disabled, they are tagged with'
                f' [{TAG_DISABLED}:REASON], where REASON is one of:')
            for reason, description in disabled.items():
                out.info(u' %-10s %s' % (reason, description))
    return 0
|
|
|
|
|
|
|
|
|
|
|
|
def do_single_list(scrapers, verbose=False):
    """Get list of scraper names, one per line.

    @return: tuple of (number of listed scrapers, disabled-reason dict)
    """
    disabled = {}
    for scraperobj in scrapers:
        if verbose:
            display_comic_help(scraperobj)
        else:
            out.info(get_tagged_scraper_name(scraperobj, reasons=disabled))
    # Off-by-one fix: the count previously returned len(scrapers) + 1, which
    # over-reported the number of supported comics by one; do_column_list
    # returns the plain element count, so this must too.
    return len(scrapers), disabled
|
2016-05-16 12:57:47 +00:00
|
|
|
|
|
|
|
|
|
|
|
def do_column_list(scrapers):
    """Get list of scraper names with multiple names per line.

    @return: tuple of (number of scrapers, disabled-reason dict)
    """
    disabled = {}
    width = out.width
    # limit name length so at least two columns are there
    limit = (width // 2) - 8
    names = [get_tagged_scraper_name(scraperobj, limit=limit, reasons=disabled)
        for scraperobj in scrapers]
    num = len(names)
    # default=0 keeps max() from raising ValueError on an empty scraper list
    maxlen = max((len(name) for name in names), default=0)
    names_per_line = max(width // (maxlen + 1), 1)
    while names:
        out.info(u''.join(name.ljust(maxlen) for name in
            names[:names_per_line]))
        del names[:names_per_line]
    return num, disabled
|
|
|
|
|
2017-05-14 22:54:02 +00:00
|
|
|
|
2016-05-16 12:57:47 +00:00
|
|
|
# Tags appended to scraper names in listings.
TAG_ADULT = "adult"
TAG_LANG = "lang"
TAG_DISABLED = "dis"


def get_tagged_scraper_name(scraperobj, limit=None, reasons=None):
    """Build the display name of a comic scraper, annotated with status tags.

    When *limit* is given the name is shortened to that length; when
    *reasons* is a dict it collects the scraper's disabled reasons.
    """
    tags = []
    if scraperobj.adult:
        tags.append(TAG_ADULT)
    if scraperobj.lang != "en":
        tags.append(f"{TAG_LANG}:{scraperobj.lang}")
    disabled = scraperobj.getDisabledReasons()
    if disabled and reasons is not None:
        reasons.update(disabled)
    tags.extend(f"{TAG_DISABLED}:{reason}" for reason in disabled)
    suffix = " [" + ", ".join(tags) + "]" if tags else ""
    name = scraperobj.name if limit is None else strlimit(scraperobj.name, limit)
    return name + suffix
|
|
|
|
|
|
|
|
|
2017-10-12 21:56:39 +00:00
|
|
|
def main(args=None):
    """Command line entry point: parse options and execute commands."""
    try:
        parser = setup_options()
        options = parser.parse_args(args=args)
        options.basepath = os.path.expanduser(options.basepath)
        return run(options)
    except KeyboardInterrupt:
        # Graceful exit on Ctrl-C.
        print("Aborted.")
        return 1
    except Exception:
        # Unexpected failure: emit the internal error report.
        internal_error()
        return 2
|