Allow multiple comic name matches.

Bastian Kleineidam 2013-02-13 22:18:05 +01:00
parent 8a33871df8
commit 40de445d8c
2 changed files with 22 additions and 18 deletions

dosage

@@ -35,6 +35,7 @@ def setupOptions():
     parser.add_option('-t', '--timestamps', action='store_true', dest='timestamps', default=False, help='print timestamps for all output at any info level')
     parser.add_option('-o', '--output', action='store', dest='output', choices=events.getHandlers(), help='output formatting for downloaded comics')
     parser.add_option('--adult', action='store_true', dest='adult', default=False, help='confirms that you are old enough to view adult content')
+    parser.add_option('--multimatch', action='store_true', dest='multimatch', default=False, help='')
     try:
         import optcomplete
         optcomplete.autocomplete(parser)
@@ -106,7 +107,7 @@ def getComics(options, comics):
     events.installHandler(options.output, options.basepath, options.baseurl)
     events.getHandler().start()
     try:
-        for scraperobj in getScrapers(comics, options.basepath, options.adult):
+        for scraperobj in getScrapers(comics, options.basepath, options.adult, options.multimatch):
             errors += getStrips(scraperobj, options)
     except ValueError as msg:
         out.error(msg)
@@ -203,7 +204,7 @@ def getScraperName(scraperobj, limit=None):
     return name + suffix


-def getScrapers(comics, basepath=None, adult=True):
+def getScrapers(comics, basepath=None, adult=True, multiple_allowed=False):
     """Get scraper objects for the given comics."""
     if '@' in comics:
         # only scrapers whose directory already exists
@@ -234,14 +235,13 @@ def getScrapers(comics, basepath=None, adult=True):
         else:
             name = comic
             indexes = None
-        # XXX if matchmulti, search for a list of scrapers
-        scraperclass = scraper.get_scraper(name)
-        if not adult and scraperclass.adult:
-            warn_adult(scraperclass)
-            continue
-        scraperobj = scraperclass(indexes=indexes)
-        if scraperobj not in scrapers:
-            scrapers[scraperobj] = True
+        for scraperclass in scraper.find_scraperclasses(name, multiple_allowed=multiple_allowed):
+            if not adult and scraperclass.adult:
+                warn_adult(scraperclass)
+                continue
+            scraperobj = scraperclass(indexes=indexes)
+            if scraperobj not in scrapers:
+                scrapers[scraperobj] = True
     for scraperobj in scrapers:
         yield scraperobj
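
For orientation, the flag simply travels from optparse into getScrapers(), which now loops over every scraper class returned for a name instead of expecting exactly one. Below is a minimal, self-contained sketch of that flow; FakeScraperClass, the LOOKUP table and the trimmed find_scraperclasses() are invented stand-ins for dosagelib, and only the control flow mirrors the diff above.

# Sketch only: how --multimatch flows from the command line into getScrapers().
# Everything named Fake*/LOOKUP below is invented; it is not the dosagelib API.
import optparse


class FakeScraperClass(object):
    adult = False

    def __init__(self, name):
        self.name = name

    def __call__(self, indexes=None):
        # stands in for scraperclass(indexes=indexes) in the real code
        return "scraper:%s" % self.name


# invented registry: one search name mapping to several matching "classes"
LOOKUP = {"field": [FakeScraperClass("Garfield"),
                    FakeScraperClass("GarfieldMinusGarfield")]}


def find_scraperclasses(name, multiple_allowed=False):
    """Trimmed stand-in: return all matches, or raise when ambiguous."""
    matches = LOOKUP.get(name.lower(), [])
    if len(matches) > 1 and not multiple_allowed:
        names = ", ".join(c.name for c in matches)
        raise ValueError("multiple comics found: %s" % names)
    return matches


def getScrapers(comics, adult=True, multiple_allowed=False):
    """Yield each matched scraper object once, mirroring the loop in the diff."""
    scrapers = {}
    for name in comics:
        for scraperclass in find_scraperclasses(name, multiple_allowed):
            if not adult and scraperclass.adult:
                continue
            scraperobj = scraperclass(indexes=None)
            if scraperobj not in scrapers:
                scrapers[scraperobj] = True
    for scraperobj in scrapers:
        yield scraperobj


def main(args):
    parser = optparse.OptionParser()
    parser.add_option('--multimatch', action='store_true', dest='multimatch',
                      default=False,
                      help='allow one comic name to match several comics')
    options, comics = parser.parse_args(args)
    for scraperobj in getScrapers(comics, multiple_allowed=options.multimatch):
        print(scraperobj)


if __name__ == '__main__':
    main(['--multimatch', 'field'])   # prints both Garfield stand-ins
    try:
        main(['field'])               # still an error without the flag
    except ValueError as msg:
        print(msg)

Without the flag the ambiguous name still aborts with "multiple comics found", which is the pre-existing behaviour that --multimatch relaxes.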

dosagelib/scraper.py

@@ -185,8 +185,10 @@ class _BasicScraper(object):
         return self.starter()


-def get_scraper(comic):
-    """Returns a comic module object."""
+def find_scraperclasses(comic, multiple_allowed=False):
+    """Get a list comic scraper classes. Can return more than one entries if
+    multiple_allowed is True, else it raises a ValueError if multiple
+    modules match. The match is a case insensitive substring search."""
     if not comic:
         raise ValueError("empty comic name")
     candidates = []
@@ -195,16 +197,18 @@ def get_scraper(comic):
         lname = scraperclass.get_name().lower()
         if lname == cname:
             # perfect match
-            return scraperclass
-        if cname in lname:
+            if not multiple_allowed:
+                return scraperclass
+            else:
+                candidates.append(scraperclass)
+        elif cname in lname:
             candidates.append(scraperclass)
-    if len(candidates) == 1:
-        return candidates[0]
-    elif candidates:
+    if len(candidates) > 1 and not multiple_allowed:
         comics = ", ".join(x.get_name() for x in candidates)
         raise ValueError('multiple comics found: %s' % comics)
-    else:
+    elif not candidates:
         raise ValueError('comic %r not found' % comic)
+    return candidates


 _scraperclasses = None
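
In short, the new lookup works like this: an exact case-insensitive name match wins immediately when only one result is wanted, otherwise it is collected along with every substring match, and ambiguity is only an error while multiple_allowed is False. A small self-contained sketch of those rules follows; the FakeScraper registry is invented, and the perfect-match branch here returns a one-element list (my assumption, so that callers can always iterate the result) where the diff above returns the bare class.

# Sketch only: the matching rules of find_scraperclasses() over a fake registry.
# FakeScraper and REGISTRY are invented; the perfect-match branch wraps the hit
# in a list (assumption) so the return value is always a list.

class FakeScraper(object):
    adult = False

    def __init__(self, name):
        self.name = name

    def get_name(self):
        return self.name


REGISTRY = [FakeScraper("CalvinAndHobbes"), FakeScraper("Calvin"),
            FakeScraper("SomethingElse")]


def find_scraperclasses(comic, multiple_allowed=False):
    """Case-insensitive substring search over REGISTRY."""
    if not comic:
        raise ValueError("empty comic name")
    candidates = []
    cname = comic.lower()
    for scraper in REGISTRY:
        lname = scraper.get_name().lower()
        if lname == cname:
            # perfect match: final unless multiple matches were requested
            if not multiple_allowed:
                return [scraper]
            else:
                candidates.append(scraper)
        elif cname in lname:
            candidates.append(scraper)
    if len(candidates) > 1 and not multiple_allowed:
        names = ", ".join(x.get_name() for x in candidates)
        raise ValueError('multiple comics found: %s' % names)
    elif not candidates:
        raise ValueError('comic %r not found' % comic)
    return candidates


# "calvin" is a perfect match for Calvin, so it wins on its own ...
print([s.get_name() for s in find_scraperclasses("calvin")])
# ... unless multiple matches are allowed, then every hit is returned.
print([s.get_name() for s in find_scraperclasses("calvin", multiple_allowed=True)])

The switch from a second if to elif in the substring check matters once the perfect-match branch can fall through: without it, an exact match collected under multiple_allowed would be appended to the candidate list twice.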