# SPDX-License-Identifier: MIT
# Copyright (C) 2013-2014 Bastian Kleineidam
# Copyright (C) 2015-2022 Tobias Gruetzmacher
from pathlib import Path

import pytest

from dosagelib.scraper import scrapers


class TestScraper(object):
    """Test scraper module functions."""
    def test_get_scrapers(self):
        for scraperobj in scrapers.all():
            scraperobj.indexes = ["bla"]
            assert scraperobj.url, "missing url in %s" % scraperobj.name
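
    # Looking up a unique comic name returns a scraper object.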
    def test_find_scrapers_single(self):
        assert scrapers.find("xkcd")
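
    # An ambiguous search term matching several comics raises a ValueError.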
    def test_find_scrapers_multi(self):
        with pytest.raises(ValueError, match='multiple comics found'):
            scrapers.find("a")
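
    # An empty comic name is rejected with a ValueError.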
    def test_find_scrapers_error(self):
        with pytest.raises(ValueError, match='empty comic name'):
            scrapers.find('')
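
    # Adding an extra scraper directory (the test mocks) makes exactly one
    # additional scraper available by name.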
    def test_user_dir(self):
        oldlen = len(scrapers.all())
        scrapers.adddir(Path(__file__).parent / 'mocks' / 'extra')
        assert len(scrapers.all()) == oldlen + 1
        assert scrapers.find('AnotherDummyTestScraper')