Use tuples rather than lists.
This commit is contained in:
parent
adb31d84af
commit
97522bc5ae
2 changed files with 8 additions and 8 deletions
|
@@ -4,7 +4,7 @@
|
||||||
import requests
|
import requests
|
||||||
import time
|
import time
|
||||||
from . import loader
|
from . import loader
|
||||||
from .util import fetchUrl, fetchUrls, getPageContent, makeList
|
from .util import fetchUrl, fetchUrls, getPageContent, makeSequence
|
||||||
from .comic import ComicStrip
|
from .comic import ComicStrip
|
||||||
from .output import out
|
from .output import out
|
||||||
from .events import getHandler
|
from .events import getHandler
|
||||||
|
@@ -104,10 +104,10 @@ class _BasicScraper(object):
|
||||||
imageUrls = fetchUrls(url, data, baseUrl, self.imageSearch)
|
imageUrls = fetchUrls(url, data, baseUrl, self.imageSearch)
|
||||||
imageUrls = set(map(self.imageUrlModifier, imageUrls))
|
imageUrls = set(map(self.imageUrlModifier, imageUrls))
|
||||||
if len(imageUrls) > 1 and not self.multipleImagesPerStrip:
|
if len(imageUrls) > 1 and not self.multipleImagesPerStrip:
|
||||||
patterns = [x.pattern for x in makeList(self.imageSearch)]
|
patterns = [x.pattern for x in makeSequence(self.imageSearch)]
|
||||||
out.warn("found %d images instead of 1 at %s with patterns %s" % (len(imageUrls), url, patterns))
|
out.warn("found %d images instead of 1 at %s with patterns %s" % (len(imageUrls), url, patterns))
|
||||||
elif not imageUrls:
|
elif not imageUrls:
|
||||||
patterns = [x.pattern for x in makeList(self.imageSearch)]
|
patterns = [x.pattern for x in makeSequence(self.imageSearch)]
|
||||||
out.warn("found no images at %s with patterns %s" % (url, patterns))
|
out.warn("found no images at %s with patterns %s" % (url, patterns))
|
||||||
return ComicStrip(self.getName(), url, imageUrls, self.namer, self.session)
|
return ComicStrip(self.getName(), url, imageUrls, self.namer, self.session)
|
||||||
|
|
||||||
|
|
|
@@ -147,18 +147,18 @@ def getImageObject(url, referrer, session, max_content_bytes=MaxImageBytes):
|
||||||
return urlopen(url, session, referrer=referrer, max_content_bytes=max_content_bytes)
|
return urlopen(url, session, referrer=referrer, max_content_bytes=max_content_bytes)
|
||||||
|
|
||||||
|
|
||||||
def makeSequence(item):
    """Normalize *item* to a sequence.

    If item is already a list or tuple, return it unchanged.
    Otherwise return a one-element tuple containing item.
    """
    # Tuples are preferred for the wrapped case: fixed-size, read-only.
    return item if isinstance(item, (list, tuple)) else (item,)
|
||||||
|
|
||||||
|
|
||||||
def fetchUrls(url, data, baseUrl, urlSearch):
|
def fetchUrls(url, data, baseUrl, urlSearch):
|
||||||
"""Search all entries for given URL pattern(s) in a HTML page."""
|
"""Search all entries for given URL pattern(s) in a HTML page."""
|
||||||
searchUrls = []
|
searchUrls = []
|
||||||
searches = makeList(urlSearch)
|
searches = makeSequence(urlSearch)
|
||||||
for search in searches:
|
for search in searches:
|
||||||
for match in search.finditer(data):
|
for match in search.finditer(data):
|
||||||
searchUrl = match.group(1)
|
searchUrl = match.group(1)
|
||||||
|
|
Loading…
Reference in a new issue