From 2b0d2d3a701d9936f0fabe0f6908cc759c210f63 Mon Sep 17 00:00:00 2001
From: Bastian Kleineidam
Date: Mon, 11 Mar 2013 19:44:42 +0100
Subject: [PATCH] Fix error when multiple comics match

---
 doc/changelog.txt |  2 ++
 dosage            | 12 +++++++++---
 2 files changed, 11 insertions(+), 3 deletions(-)

diff --git a/doc/changelog.txt b/doc/changelog.txt
index 856af784f..661feda4d 100644
--- a/doc/changelog.txt
+++ b/doc/changelog.txt
@@ -14,6 +14,8 @@ Fixes:
 - cmdline: Catch error when piping output to another program or file
   under Windows. Closes: GH bug #13
+- cmdline: Catch error when multiple comics match.
+  Closes: GH bug #16
 - comics: Retry download on empty content to reduce empty file
   errors.

diff --git a/dosage b/dosage
index b5bf9e677..23df98b14 100755
--- a/dosage
+++ b/dosage
@@ -158,7 +158,6 @@ def getComics(options):
         for scraperobj in getScrapers(options.comic, options.basepath, options.adult, options.multimatch):
             errors += getStrips(scraperobj, options)
     finally:
-        out.context = ''
         events.getHandler().end()
     return errors
 
@@ -166,7 +165,6 @@ def getComics(options):
 def getStrips(scraperobj, options):
     """Get all strips from a scraper."""
     errors = 0
-    out.context = scraperobj.getName()
     if options.all:
         numstrips = None
     elif options.numstrips:
@@ -175,6 +173,7 @@ def getStrips(scraperobj, options):
         # get current strip
         numstrips = 1
     try:
+        out.context = scraperobj.getName()
         for strip in scraperobj.getStrips(numstrips):
             _errors, skipped = saveComicStrip(strip, options.basepath)
             errors += _errors
@@ -185,6 +184,8 @@ def getStrips(scraperobj, options):
     except Exception as msg:
         out.error(msg)
         errors += 1
+    finally:
+        out.context = ''
     return errors
 
 
@@ -292,7 +293,12 @@ def getScrapers(comics, basepath=None, adult=True, multiple_allowed=False):
             else:
                 name = comic
                 indexes = None
-            for scraperclass in scraper.find_scraperclasses(name, multiple_allowed=multiple_allowed):
+            try:
+                scraperclasses = scraper.find_scraperclasses(name, multiple_allowed=multiple_allowed)
+            except ValueError as msg:
+                out.error(msg)
+                continue
+            for scraperclass in scraperclasses:
                 if not adult and scraperclass.adult:
                     warn_adult(scraperclass)
                     continue