Drop Python 2 support: six & other imports
parent c6cd914c5c
commit 9c65c3e05f
16 changed files with 40 additions and 68 deletions
@@ -5,9 +5,8 @@
 
 from __future__ import absolute_import, division, print_function
 
-import os
 import argparse
-import six
+import os
 
 from . import events, configuration, singleton, director, scraper
 from . import AppName, __version__

@@ -168,8 +167,8 @@ def display_comic_help(scraperobj):
     orig_context = out.context
     out.context = scraperobj.name
     try:
-        out.info(u"URL: " + six.text_type(scraperobj.url))
-        out.info(u"Language: " + scraperobj.language())
+        out.info('URL: {}'.format(scraperobj.url))
+        out.info('Language: {}'.format(scraperobj.language()))
         if scraperobj.adult:
            out.info(u"Adult comic, use option --adult to fetch.")
         disabled = scraperobj.getDisabledReasons()

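For context on this hunk: on Python 3 every str is already Unicode text, so six.text_type collapses to str and plain str.format does the job. A minimal illustrative sketch, not project code (the url value is made up):

    url = 'https://example.com/comic'   # hypothetical value
    # Python 2 needed six.text_type to force unicode:
    #   u"URL: " + six.text_type(url)
    # Python 3 equivalent used by the new lines above:
    print('URL: {}'.format(url))
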
@@ -8,9 +8,9 @@ from __future__ import absolute_import, division, print_function
 
 import os
 import threading
-from six.moves import _thread
-from six.moves.queue import Queue, Empty
-from six.moves.urllib.parse import urlparse
+import _thread
+from queue import Queue, Empty
+from urllib.parse import urlparse
 
 from .output import out
 from . import events, scraper

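The six.moves aliases dropped above map one-to-one onto Python 3 standard-library modules. A small standalone reference sketch (not project code; Empty is imported only to mirror the diff line):

    import _thread                      # was: from six.moves import _thread
    from queue import Queue, Empty      # was: from six.moves.queue import Queue, Empty
    from urllib.parse import urlparse   # was: from six.moves.urllib.parse import urlparse

    q = Queue()
    q.put(urlparse('https://example.com/a?x=1').hostname)
    print(q.get())  # example.com
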
@@ -7,7 +7,7 @@ from __future__ import absolute_import, division, print_function
 
 import os
 import time
-from six.moves.urllib.parse import quote as url_quote
+from urllib.parse import quote as url_quote
 import codecs
 import json
 

@@ -10,7 +10,7 @@ from time import time, sleep
 import requests
 from requests.adapters import HTTPAdapter
 from requests.packages.urllib3.util.retry import Retry
-from six.moves.urllib.parse import urlparse
+from urllib.parse import urlparse
 
 from .configuration import UserAgent
 

@@ -1,25 +1,21 @@
 # -*- coding: utf-8 -*-
 # Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
 # Copyright (C) 2012-2014 Bastian Kleineidam
-# Copyright (C) 2015-2016 Tobias Gruetzmacher
+# Copyright (C) 2015-2020 Tobias Gruetzmacher
 
 from __future__ import absolute_import, division, print_function
 
-import time
-import sys
-import os
-import threading
-import traceback
 import codecs
 import contextlib
-import pydoc
 import io
-import six
+import os
+import pydoc
+import sys
+import threading
+import time
+import traceback
 
-try:
-    from shutil import get_terminal_size
-except ImportError:
-    from backports.shutil_get_terminal_size import get_terminal_size
+from shutil import get_terminal_size
 
 import colorama
 from colorama import Fore, Style

@@ -111,8 +107,8 @@ class Output(object):
         self.stream.write(u'%s%s> ' % (timestamp, get_threadname()))
         if color and self.is_tty:
             s = u'%s%s%s' % (color, s, Style.RESET_ALL)
-        self.stream.write(six.text_type(s))
-        self.stream.write(six.text_type(os.linesep))
+        self.stream.write(str(s))
+        self.stream.write(str(os.linesep))
         self.stream.flush()
 
     def writelines(self, lines, level=0):

@@ -7,7 +7,7 @@
 from __future__ import absolute_import, division, print_function
 
 from re import compile
-from six.moves.urllib.parse import urljoin
+from urllib.parse import urljoin
 
 from ..helpers import bounceStarter, xpath_class
 from ..scraper import _BasicScraper, _ParserScraper

@@ -7,7 +7,7 @@ from __future__ import absolute_import, division, print_function
 
 import os
 import re
-from six.moves.urllib.parse import urljoin
+from urllib.parse import urljoin
 
 from lxml import html, etree
 from lxml.html.defs import link_attrs as html_link_attrs

@@ -5,30 +5,20 @@
 
 from __future__ import absolute_import, division, print_function
 
-from six.moves.urllib.parse import (parse_qs,
-    quote as url_quote, unquote as url_unquote, urlparse, urlunparse, urlsplit)
-from six.moves.urllib_robotparser import RobotFileParser
-import requests
-import sys
+import html
 import os
 import re
-import traceback
-import time
 import subprocess
+import sys
+import time
+import traceback
 
-try:
-    import html
-except ImportError:
-    # Python 2.7
-    from HTMLParser import HTMLParser
-    html = HTMLParser()
-from six.moves import range
-import six
+from functools import lru_cache
+from urllib.parse import (parse_qs, quote as url_quote, unquote as url_unquote,
+                          urlparse, urlunparse, urlsplit)
+from urllib.robotparser import RobotFileParser
 
-try:
-    from functools import lru_cache
-except ImportError:
-    from backports.functools_lru_cache import lru_cache
+import requests
 
 from .output import out
 from .configuration import UserAgent, App, SupportUrl

@@ -65,9 +55,6 @@ def get_nt_system_uid():
     r"""Get the MachineGuid from
     HKEY_LOCAL_MACHINE\Software\Microsoft\Cryptography\MachineGuid
     """
-    try:
-        import _winreg as winreg
-    except ImportError:
-        import winreg
+    import winreg
     lm = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
     try:

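Python 3 always ships this module under the name winreg (the _winreg spelling was Python 2 only), so the try/except import dance can go. A minimal Windows-only sketch of reading the same registry value; the helper name machine_guid is illustrative, not from the codebase:

    import winreg  # Python 3 name; Python 2 called this module _winreg

    def machine_guid():
        # Read HKEY_LOCAL_MACHINE\Software\Microsoft\Cryptography\MachineGuid
        lm = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
        try:
            key = winreg.OpenKey(lm, r"Software\Microsoft\Cryptography")
            try:
                return winreg.QueryValueEx(key, "MachineGuid")[0]
            finally:
                key.Close()
        finally:
            lm.Close()
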
@@ -106,7 +93,7 @@ def backtick(cmd, encoding='utf-8'):
 
 def unicode_safe(text, encoding=UrlEncoding, errors='ignore'):
     """Decode text to Unicode if not already done."""
-    if isinstance(text, six.text_type):
+    if isinstance(text, str):
         return text
     return text.decode(encoding, errors)
 

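With only Python 3 left, six.text_type is simply str, so the function now passes str through and decodes bytes. A self-contained sketch of that behaviour, with UTF-8 standing in for the module's UrlEncoding default:

    def unicode_safe(text, encoding='utf-8', errors='ignore'):
        """Return text as str, decoding bytes if necessary."""
        if isinstance(text, str):
            return text
        return text.decode(encoding, errors)

    assert unicode_safe(b'caf\xc3\xa9') == 'café'
    assert unicode_safe('café') == 'café'
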
@@ -2,6 +2,5 @@ colorama
 imagesize
 lxml
 requests>=2.0
-six
 cached_property; python_version<'3.8'
 importlib_metadata; python_version<'3.8'

@@ -10,7 +10,7 @@ processing.
 from __future__ import absolute_import, division, print_function
 
 import sys
-from six.moves.urllib.parse import urlsplit
+from urllib.parse import urlsplit
 
 from scriptutil import ComicListUpdater
 

@@ -10,7 +10,7 @@ JSON file for further processing.
 """
 from __future__ import absolute_import, division, print_function
 
-from six.moves.urllib.parse import urlsplit
+from urllib.parse import urlsplit
 
 from scriptutil import ComicListUpdater
 from dosagelib.util import check_robotstxt

@@ -1,21 +1,16 @@
 # -*- coding: utf-8 -*-
 # Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
 # Copyright (C) 2012-2014 Bastian Kleineidam
-# Copyright (C) 2015-2019 Tobias Gruetzmacher
+# Copyright (C) 2015-2020 Tobias Gruetzmacher
 
 from __future__ import absolute_import, division, print_function
 
+import codecs
+import json
 import os
 import re
 import sys
 import time
-import json
-import codecs
-
-try:
-    from os import replace as rename
-except ImportError:
-    from os import rename
 
 from lxml import html

@@ -99,7 +94,7 @@ class ComicListUpdater(object):
             for name, entry in sorted(data.items(), key=first_lower):
                 self.write_entry(newf, name, entry, min_comics, indent)
             self.copy_after_end(oldf, newf)
-        rename(filename + '.new', filename)
+        os.replace(filename + '.new', filename)
 
     def copy_until_start(self, src, dest):
         for line in src:

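os.replace has been in the standard library since Python 3.3 and, unlike os.rename on Windows, overwrites an existing destination, which is what the old rename fallback approximated. A tiny usage sketch with throwaway file names:

    import os

    with open('comics.json.new', 'w') as f:   # hypothetical temporary file
        f.write('{}\n')
    os.replace('comics.json.new', 'comics.json')  # replaces the target, also on Windows
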
@@ -8,7 +8,7 @@ JSON file for further processing.
 """
 from __future__ import absolute_import, division, print_function
 
-from six.moves.urllib.parse import urlsplit, parse_qs
+from urllib.parse import urlsplit, parse_qs
 
 from scriptutil import ComicListUpdater
 from dosagelib.util import check_robotstxt

@@ -36,7 +36,6 @@ install_requires =
     imagesize
     lxml
     requests>=2.0
-    six
     cached_property; python_version<'3.8'
     importlib_metadata; python_version<'3.8'
 

@@ -7,10 +7,7 @@ import gzip
 import os.path
 import re
 
-try:
-    from functools import lru_cache
-except ImportError:
-    from backports.functools_lru_cache import lru_cache
+from functools import lru_cache
 
 from responses import add, GET
 

@@ -7,7 +7,7 @@ from __future__ import absolute_import, division, print_function
 
 import re
 import multiprocessing
-from six.moves.urllib.parse import urlsplit
+from urllib.parse import urlsplit
 
 
 # Dictionary with per-host locks.