# SPDX-License-Identifier: MIT
# Copyright (C) 2013-2014 Bastian Kleineidam
# Copyright (C) 2015-2020 Tobias Gruetzmacher
from pathlib import Path
import pytest
from dosagelib.scraper import scrapers
class TestScraper(object):
    """Tests for the scraper plugin registry."""

    def test_get_scrapers(self):
        """Every registered scraper must expose a non-empty URL."""
        for scraper in scrapers.get():
            # Force index mode on each scraper before checking its attributes.
            scraper.indexes = ["bla"]
            message = "missing url in %s" % scraper.name
            assert scraper.url, message

    def test_find_scrapers_single(self):
        """Looking up an exact comic name yields exactly one match."""
        matches = scrapers.find("xkcd")
        assert len(matches) == 1

    def test_find_scrapers_multi(self):
        """A common substring yields several matches when multiples are allowed."""
        matches = scrapers.find("a", multiple_allowed=True)
        assert len(matches) > 1

    def test_find_scrapers_error(self):
        """An empty comic name is rejected with ValueError."""
        with pytest.raises(ValueError, match='empty comic name'):
            scrapers.find('')

    def test_user_dir(self):
        """Scrapers from an extra plugin directory are added to the registry."""
        before = len(scrapers.get())
        scrapers.adddir(Path(__file__).parent / 'mocks' / 'plugins')
        # The mock plugin directory contributes exactly one new scraper.
        assert len(scrapers.get()) == before + 1
        assert len(scrapers.find('ADummyTestScraper')) == 1