diff --git a/Makefile b/Makefile
index 545d26ee3..d061694e5 100644
--- a/Makefile
+++ b/Makefile
@@ -6,7 +6,7 @@ ARCHIVE:=dosage-$(VERSION).tar.gz
 PY_FILES_DIRS := dosage dosagelib tests *.py
 PY2APPOPTS ?=
 NUMPROCESSORS:=$(shell grep -c processor /proc/cpuinfo)
-MAXFAILEDTESTS:=200
+MAXFAILEDTESTS:=2
 # Pytest options:
 # - stop after MAXFAILEDTESTS failed errors
 # - use multiple processors
diff --git a/tests/test_comics.py b/tests/test_comics.py
index 71bc4811b..58c49c7bc 100644
--- a/tests/test_comics.py
+++ b/tests/test_comics.py
@@ -12,40 +12,48 @@ class _ComicTester(TestCase):
     """Basic comic test class."""
     scraperclass=None
 
+    def setUp(self):
+        self.name = self.scraperclass.get_name()
+
     def test_comic(self):
         # Test a scraper. It must be able to traverse backward for
         # at least 5 pages from the start, and find strip images
         # on at least 4 pages.
-        module = self.scraperclass()
+        scraperobj = self.scraperclass()
         num = empty = 0
-        for n, comics in izip(xrange(5), module):
-            if len(comics) == 0:
+        for n, strip in izip(xrange(5), scraperobj.getAllStrips()):
+            images = strip.getImages()
+            if len(images) == 0:
                 empty += 1
-            for comic in comics:
-                self.save(comic)
+            for image in images:
+                self.save(image)
                 num += 1
-        self.assertTrue(num >= 4, 'Traversal failed after %d strips.' % num)
-        self.assertTrue(empty <= 1, 'Failed to find images on %d pages.' % empty)
+        self.check(num >= 4, 'traversal failed after %d strips.' % num)
+        self.check(empty <= 1, 'failed to find images on %d pages.' % empty)
 
-    def save(self, comic):
+    def save(self, image):
         # create a temporary directory
         tmpdir = tempfile.mkdtemp()
         try:
-            filename, saved = comic.save(tmpdir)
-            self.assertTrue(saved, 'Could not save comic %s to %s' % (comic, tmpdir))
+            image.save(tmpdir)
+        except Exception, msg:
+            self.check(False, 'could not save to %s: %s' % (tmpdir, msg))
         finally:
             shutil.rmtree(tmpdir)
 
+    def check(self, condition, msg):
+        self.assertTrue(condition, "%s: %s" % (self.name, msg))
+
 
 def generate_comic_testers():
     """For each comic scraper, create a test class. This currently
     generates over 4000 test classes (one for each comic), so this
     takes a while."""
-    for s in scraper.items():
-        name = 'Test'+s.__name__
+    for scraperclass in scraper.get_scrapers():
+        name = 'Test'+scraperclass.__name__
         globals()[name] = type(name, (_ComicTester,),
-            dict(scraperclass=s)
+            dict(scraperclass=scraperclass)
             )
 
 generate_comic_testers()
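
Note: the revised tests depend on a scraper interface exposing get_scrapers(), get_name(), getAllStrips(), getImages(), and a per-image save(directory); those names come from the diff above. The following is only a minimal sketch of that interface with placeholder bodies (an assumption for illustration, not the actual dosagelib code):

# Rough sketch of the scraper interface the new tests exercise.
# Method names match the diff; the bodies are placeholders, not the
# real dosagelib implementation.

class ComicStrip(object):
    """One comic page together with the image objects found on it."""
    def __init__(self, images):
        self.images = images

    def getImages(self):
        # Each image object is expected to provide save(directory).
        return self.images


class Scraper(object):
    """Base scraper; iterating getAllStrips() walks backward from the start page."""

    @classmethod
    def get_name(cls):
        # Used by the tests to label failure messages.
        return cls.__name__

    def getAllStrips(self):
        # Real scrapers fetch pages and yield ComicStrip objects;
        # this stub yields nothing.
        return iter([])


def get_scrapers():
    # Real code would discover every scraper class; stubbed here with
    # the base class only.
    return [Scraper]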