Use tuples rather than lists.
parent adb31d84af
commit 97522bc5ae
2 changed files with 8 additions and 8 deletions
@@ -4,7 +4,7 @@
 import requests
 import time
 from . import loader
-from .util import fetchUrl, fetchUrls, getPageContent, makeList
+from .util import fetchUrl, fetchUrls, getPageContent, makeSequence
 from .comic import ComicStrip
 from .output import out
 from .events import getHandler
@@ -104,10 +104,10 @@ class _BasicScraper(object):
         imageUrls = fetchUrls(url, data, baseUrl, self.imageSearch)
         imageUrls = set(map(self.imageUrlModifier, imageUrls))
         if len(imageUrls) > 1 and not self.multipleImagesPerStrip:
-            patterns = [x.pattern for x in makeList(self.imageSearch)]
+            patterns = [x.pattern for x in makeSequence(self.imageSearch)]
             out.warn("found %d images instead of 1 at %s with patterns %s" % (len(imageUrls), url, patterns))
         elif not imageUrls:
-            patterns = [x.pattern for x in makeList(self.imageSearch)]
+            patterns = [x.pattern for x in makeSequence(self.imageSearch)]
             out.warn("found no images at %s with patterns %s" % (url, patterns))
         return ComicStrip(self.getName(), url, imageUrls, self.namer, self.session)

@@ -147,18 +147,18 @@ def getImageObject(url, referrer, session, max_content_bytes=MaxImageBytes):
     return urlopen(url, session, referrer=referrer, max_content_bytes=max_content_bytes)


-def makeList(item):
-    """If tiem is already a list or tuple, return it.
-    Else return a list with item as single element."""
+def makeSequence(item):
+    """If item is already a list or tuple, return it.
+    Else return a tuple with item as single element."""
     if isinstance(item, (list, tuple)):
         return item
-    return [item]
+    return (item,)


 def fetchUrls(url, data, baseUrl, urlSearch):
     """Search all entries for given URL pattern(s) in a HTML page."""
     searchUrls = []
-    searches = makeList(urlSearch)
+    searches = makeSequence(urlSearch)
     for search in searches:
         for match in search.finditer(data):
             searchUrl = match.group(1)
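
Note: makeSequence normalizes a scraper's search attribute, which may be a single compiled regex or a list of them, into something that can be iterated uniformly. The standalone sketch below replays that behavior outside the codebase; the example patterns are illustrative only, not taken from this commit.

import re

def makeSequence(item):
    """If item is already a list or tuple, return it.
    Else return a tuple with item as single element."""
    if isinstance(item, (list, tuple)):
        return item
    return (item,)

# A scraper may declare either one pattern or a list of patterns;
# makeSequence lets callers iterate over both forms the same way.
single = re.compile(r'<img src="([^"]+\.png)"')
several = [re.compile(r'<img src="([^"]+\.png)"'),
           re.compile(r'<img src="([^"]+\.gif)"')]

print([x.pattern for x in makeSequence(single)])   # one-element tuple
print([x.pattern for x in makeSequence(several)])  # list passed through unchanged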