Read scraper modules from user data directory

This allows users to add scrapers without setting up a complete Python
development environment.
Author: Tobias Gruetzmacher
Date: 2020-10-04 23:24:05 +02:00
parent 0bdf3dd94b
commit d487485815
6 changed files with 64 additions and 5 deletions
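In practice this means a user can drop a plain Python module into a "plugins" folder below dosage's per-user data directory and have it picked up on the next run, with no checkout or editable install of the package. A minimal sketch of such a module, assuming the _ParserScraper base class that the bundled plugins use; the file path, class name, and URLs are purely illustrative:

# Example location on Linux: ~/.local/share/dosage/plugins/mycomic.py
# (see the appdirs sketch after the cmd.py changes for macOS/Windows paths)
from dosagelib.scraper import _ParserScraper


class MyComic(_ParserScraper):
    # Hypothetical example values, not a real comic.
    url = 'https://mycomic.example.com/'
    stripUrl = url + 'comic/%s'
    firstStripUrl = stripUrl % '1'
    imageSearch = '//img[@id="comic"]'
    prevSearch = '//a[@rel="prev"]'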


@@ -3,7 +3,11 @@
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2020 Tobias Gruetzmacher
import argparse
import contextlib
import os
from pathlib import Path
import appdirs
from . import events, configuration, singleton, director
from . import AppName, __version__
@@ -36,6 +40,10 @@ strips of all of them:
"""

# Making our config roaming seems sensible
userdirs = appdirs.AppDirs(appname=AppName, appauthor=False, roaming=True)


def setup_options():
    """Construct option parser.
    @return: new option parser
@@ -230,6 +238,7 @@ def run(options):
    if not options.comic:
        out.warn(u'No comics specified, bailing out!')
        return 1
    add_user_scrapers()
    if options.modulehelp:
        return display_help(options)
    if options.vote:
@@ -237,8 +246,37 @@ def run(options):
    return director.getComics(options)


def add_user_scrapers():
    """Add extra comic modules from the user data directory. This uses two
    different locations: The "system-native" location and paths matching the
    XDG basedir spec. While XDG isn't a thing on macOS and Windows, some users
    (and developers) like to use these paths cross-platform, therefore we
    support both."""
    dirs = set()
    dirs.add(userdirs.user_data_dir)
    with xdg_system():
        dirs.add(userdirs.user_data_dir)
    dirs = (Path(x) / 'plugins' for x in dirs)
    for d in dirs:
        allscrapers.adddir(d)


@contextlib.contextmanager
def xdg_system():
    """context manager to do something with appdirs while forcing the system to
    be "linux2", which implements the XDG base dir spec.
    """
    oldsys = appdirs.system
    appdirs.system = 'linux2'
    try:
        yield
    finally:
        appdirs.system = oldsys


def do_list(column_list=True, verbose=False, listall=False):
    """List available comics."""
    add_user_scrapers()
    with out.pager():
        out.info(u'Available comic scrapers:')
        out.info(u'Comics tagged with [{}] require age confirmation'
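To make the two locations scanned by add_user_scrapers() concrete, here is a small standalone sketch that resolves them the same way the code above does, using the literal 'dosage' in place of AppName; the printed paths are typical defaults and depend on platform, user name, and XDG environment variables:

import appdirs

userdirs = appdirs.AppDirs(appname='dosage', appauthor=False, roaming=True)

# "System-native" location, typically:
#   Linux:   ~/.local/share/dosage
#   macOS:   ~/Library/Application Support/dosage
#   Windows: C:\Users\<user>\AppData\Roaming\dosage
print(userdirs.user_data_dir)

# appdirs chooses its backend per call from the module-level 'system'
# attribute, so temporarily forcing it to 'linux2' makes the same property
# return the XDG location (by default ~/.local/share/dosage) on any platform.
old_system = appdirs.system
appdirs.system = 'linux2'
try:
    print(userdirs.user_data_dir)
finally:
    appdirs.system = old_system

On Linux both lookups normally resolve to the same path, which is why add_user_scrapers() collects them in a set before appending 'plugins': duplicates collapse and each directory is handed to allscrapers.adddir() only once.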


@@ -1,3 +1,4 @@
appdirs
colorama
imagesize
lxml>=4.0.0


@@ -32,6 +32,7 @@ project_urls =
[options]
packages = find:
install_requires =
    appdirs
    colorama
    imagesize
    lxml>=4.0.0


@@ -1,6 +1,7 @@
# SPDX-License-Identifier: MIT
# Copyright (C) 2019-2020 Tobias Gruetzmacher
import time
from pathlib import Path

import pytest
@@ -12,3 +13,14 @@ def _nosleep(monkeypatch):
        pass

    monkeypatch.setattr(time, 'sleep', sleep)


class FakeAppdirs:
    @property
    def user_data_dir(self):
        return str(Path(__file__).parent / 'mocks')


@pytest.fixture()
def _noappdirs(monkeypatch):
    monkeypatch.setattr('dosagelib.cmd.userdirs', FakeAppdirs())
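The _noappdirs fixture redirects userdirs.user_data_dir to a "mocks" directory next to the tests, so add_user_scrapers() will pick up modules from mocks/plugins/ during the command-line tests below, which expect a scraper named ADummyTestScraper. A plausible minimal mock module, shaped like the user-level sketch near the top; the file name and base class are assumptions, only the class name comes from the test:

# tests/mocks/plugins/dummy.py  (file name assumed)
from dosagelib.scraper import _ParserScraper


class ADummyTestScraper(_ParserScraper):
    url = 'https://dummy.example/'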


@@ -26,16 +26,22 @@ def cmd_err(*options):
    assert cmd(*options) == 1


@pytest.mark.usefixtures("_nosleep")
@pytest.mark.usefixtures('_nosleep', '_noappdirs')
class TestDosage(object):
    """Test the dosage commandline client."""

    # This shouldn't hit the network at all, so add responses without mocks to
    # make sure it doesn't do that
    @responses.activate
    def test_list_comics(self):
        for option in ("-l", "--list", "--singlelist"):
            cmd_ok(option)
    @pytest.mark.parametrize(('option'), [
        ('-l'),
        ('--list'),
        ('--singlelist'),
    ])
    def test_list_comics(self, option, capfd):
        cmd_ok(option)
        out, err = capfd.readouterr()
        assert 'ADummyTestScraper' in out

    @responses.activate
    def test_display_version(self):


@@ -11,12 +11,13 @@ from dosagelib.plugins.s import SoloLeveling
from dosagelib.plugins.smackjeeves import SmackJeeves
from dosagelib.scraper import GeoblockedException


def cmd(*options):
    """'Fake' run dosage with given options."""
    assert dosagelib.cmd.main(("--allow-multiple",) + options) == 0


@pytest.mark.usefixtures("_nosleep")
@pytest.mark.usefixtures('_nosleep', '_noappdirs')
class TestModules(object):
    """Test that specific comic modules work correctly."""