Drop Python 2 support: six & other imports

This commit is contained in:
Tobias Gruetzmacher 2020-02-03 01:03:31 +01:00
parent c6cd914c5c
commit 9c65c3e05f
16 changed files with 40 additions and 68 deletions

View file

@@ -5,9 +5,8 @@
from __future__ import absolute_import, division, print_function from __future__ import absolute_import, division, print_function
import os
import argparse import argparse
import six import os
from . import events, configuration, singleton, director, scraper from . import events, configuration, singleton, director, scraper
from . import AppName, __version__ from . import AppName, __version__
@@ -168,8 +167,8 @@ def display_comic_help(scraperobj):
orig_context = out.context orig_context = out.context
out.context = scraperobj.name out.context = scraperobj.name
try: try:
out.info(u"URL: " + six.text_type(scraperobj.url)) out.info('URL: {}'.format(scraperobj.url))
out.info(u"Language: " + scraperobj.language()) out.info('Language: {}'.format(scraperobj.language()))
if scraperobj.adult: if scraperobj.adult:
out.info(u"Adult comic, use option --adult to fetch.") out.info(u"Adult comic, use option --adult to fetch.")
disabled = scraperobj.getDisabledReasons() disabled = scraperobj.getDisabledReasons()

View file

@@ -8,9 +8,9 @@ from __future__ import absolute_import, division, print_function
import os import os
import threading import threading
from six.moves import _thread import _thread
from six.moves.queue import Queue, Empty from queue import Queue, Empty
from six.moves.urllib.parse import urlparse from urllib.parse import urlparse
from .output import out from .output import out
from . import events, scraper from . import events, scraper

View file

@@ -7,7 +7,7 @@ from __future__ import absolute_import, division, print_function
import os import os
import time import time
from six.moves.urllib.parse import quote as url_quote from urllib.parse import quote as url_quote
import codecs import codecs
import json import json

View file

@@ -10,7 +10,7 @@ from time import time, sleep
import requests import requests
from requests.adapters import HTTPAdapter from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry from requests.packages.urllib3.util.retry import Retry
from six.moves.urllib.parse import urlparse from urllib.parse import urlparse
from .configuration import UserAgent from .configuration import UserAgent

View file

@@ -1,25 +1,21 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs # Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam # Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2016 Tobias Gruetzmacher # Copyright (C) 2015-2020 Tobias Gruetzmacher
from __future__ import absolute_import, division, print_function from __future__ import absolute_import, division, print_function
import time
import sys
import os
import threading
import traceback
import codecs import codecs
import contextlib import contextlib
import pydoc
import io import io
import six import os
import pydoc
import sys
import threading
import time
import traceback
try: from shutil import get_terminal_size
from shutil import get_terminal_size
except ImportError:
from backports.shutil_get_terminal_size import get_terminal_size
import colorama import colorama
from colorama import Fore, Style from colorama import Fore, Style
@@ -111,8 +107,8 @@ class Output(object):
self.stream.write(u'%s%s> ' % (timestamp, get_threadname())) self.stream.write(u'%s%s> ' % (timestamp, get_threadname()))
if color and self.is_tty: if color and self.is_tty:
s = u'%s%s%s' % (color, s, Style.RESET_ALL) s = u'%s%s%s' % (color, s, Style.RESET_ALL)
self.stream.write(six.text_type(s)) self.stream.write(str(s))
self.stream.write(six.text_type(os.linesep)) self.stream.write(str(os.linesep))
self.stream.flush() self.stream.flush()
def writelines(self, lines, level=0): def writelines(self, lines, level=0):

View file

@@ -7,7 +7,7 @@
from __future__ import absolute_import, division, print_function from __future__ import absolute_import, division, print_function
from re import compile from re import compile
from six.moves.urllib.parse import urljoin from urllib.parse import urljoin
from ..helpers import bounceStarter, xpath_class from ..helpers import bounceStarter, xpath_class
from ..scraper import _BasicScraper, _ParserScraper from ..scraper import _BasicScraper, _ParserScraper

View file

@@ -7,7 +7,7 @@ from __future__ import absolute_import, division, print_function
import os import os
import re import re
from six.moves.urllib.parse import urljoin from urllib.parse import urljoin
from lxml import html, etree from lxml import html, etree
from lxml.html.defs import link_attrs as html_link_attrs from lxml.html.defs import link_attrs as html_link_attrs

View file

@@ -5,30 +5,20 @@
from __future__ import absolute_import, division, print_function from __future__ import absolute_import, division, print_function
from six.moves.urllib.parse import (parse_qs, import html
quote as url_quote, unquote as url_unquote, urlparse, urlunparse, urlsplit)
from six.moves.urllib_robotparser import RobotFileParser
import requests
import sys
import os import os
import re import re
import traceback
import time
import subprocess import subprocess
import sys
import time
import traceback
try: from functools import lru_cache
import html from urllib.parse import (parse_qs, quote as url_quote, unquote as url_unquote,
except ImportError: urlparse, urlunparse, urlsplit)
# Python 2.7 from urllib.robotparser import RobotFileParser
from HTMLParser import HTMLParser
html = HTMLParser()
from six.moves import range
import six
try: import requests
from functools import lru_cache
except ImportError:
from backports.functools_lru_cache import lru_cache
from .output import out from .output import out
from .configuration import UserAgent, App, SupportUrl from .configuration import UserAgent, App, SupportUrl
@@ -65,10 +55,7 @@ def get_nt_system_uid():
r"""Get the MachineGuid from r"""Get the MachineGuid from
HKEY_LOCAL_MACHINE\Software\Microsoft\Cryptography\MachineGuid HKEY_LOCAL_MACHINE\Software\Microsoft\Cryptography\MachineGuid
""" """
try: import winreg
import _winreg as winreg
except ImportError:
import winreg
lm = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) lm = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
try: try:
key = winreg.OpenKey(lm, r"Software\Microsoft\Cryptography") key = winreg.OpenKey(lm, r"Software\Microsoft\Cryptography")
@@ -106,7 +93,7 @@ def backtick(cmd, encoding='utf-8'):
def unicode_safe(text, encoding=UrlEncoding, errors='ignore'): def unicode_safe(text, encoding=UrlEncoding, errors='ignore'):
"""Decode text to Unicode if not already done.""" """Decode text to Unicode if not already done."""
if isinstance(text, six.text_type): if isinstance(text, str):
return text return text
return text.decode(encoding, errors) return text.decode(encoding, errors)

View file

@@ -2,6 +2,5 @@ colorama
imagesize imagesize
lxml lxml
requests>=2.0 requests>=2.0
six
cached_property; python_version<'3.8' cached_property; python_version<'3.8'
importlib_metadata; python_version<'3.8' importlib_metadata; python_version<'3.8'

View file

@@ -10,7 +10,7 @@ processing.
from __future__ import absolute_import, division, print_function from __future__ import absolute_import, division, print_function
import sys import sys
from six.moves.urllib.parse import urlsplit from urllib.parse import urlsplit
from scriptutil import ComicListUpdater from scriptutil import ComicListUpdater

View file

@@ -10,7 +10,7 @@ JSON file for further processing.
""" """
from __future__ import absolute_import, division, print_function from __future__ import absolute_import, division, print_function
from six.moves.urllib.parse import urlsplit from urllib.parse import urlsplit
from scriptutil import ComicListUpdater from scriptutil import ComicListUpdater
from dosagelib.util import check_robotstxt from dosagelib.util import check_robotstxt

View file

@@ -1,21 +1,16 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs # Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam # Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2019 Tobias Gruetzmacher # Copyright (C) 2015-2020 Tobias Gruetzmacher
from __future__ import absolute_import, division, print_function from __future__ import absolute_import, division, print_function
import codecs
import json
import os import os
import re import re
import sys import sys
import time import time
import json
import codecs
try:
from os import replace as rename
except ImportError:
from os import rename
from lxml import html from lxml import html
@@ -99,7 +94,7 @@ class ComicListUpdater(object):
for name, entry in sorted(data.items(), key=first_lower): for name, entry in sorted(data.items(), key=first_lower):
self.write_entry(newf, name, entry, min_comics, indent) self.write_entry(newf, name, entry, min_comics, indent)
self.copy_after_end(oldf, newf) self.copy_after_end(oldf, newf)
rename(filename + '.new', filename) os.replace(filename + '.new', filename)
def copy_until_start(self, src, dest): def copy_until_start(self, src, dest):
for line in src: for line in src:

View file

@@ -8,7 +8,7 @@ JSON file for further processing.
""" """
from __future__ import absolute_import, division, print_function from __future__ import absolute_import, division, print_function
from six.moves.urllib.parse import urlsplit, parse_qs from urllib.parse import urlsplit, parse_qs
from scriptutil import ComicListUpdater from scriptutil import ComicListUpdater
from dosagelib.util import check_robotstxt from dosagelib.util import check_robotstxt

View file

@@ -36,7 +36,6 @@ install_requires =
imagesize imagesize
lxml lxml
requests>=2.0 requests>=2.0
six
cached_property; python_version<'3.8' cached_property; python_version<'3.8'
importlib_metadata; python_version<'3.8' importlib_metadata; python_version<'3.8'

View file

@@ -7,10 +7,7 @@ import gzip
import os.path import os.path
import re import re
try: from functools import lru_cache
from functools import lru_cache
except ImportError:
from backports.functools_lru_cache import lru_cache
from responses import add, GET from responses import add, GET

View file

@@ -7,7 +7,7 @@ from __future__ import absolute_import, division, print_function
import re import re
import multiprocessing import multiprocessing
from six.moves.urllib.parse import urlsplit from urllib.parse import urlsplit
# Dictionary with per-host locks. # Dictionary with per-host locks.