Merge pull request #85 from sizlo/improveordering

Preserve the order of images in multi image strips for ordered symlink folders
This commit is contained in:
Tobias Gruetzmacher 2017-05-16 23:09:46 +02:00
commit 4ee99eb196
4 changed files with 24 additions and 4 deletions

View file

@@ -287,6 +287,13 @@ class JSONEventHandler(EventHandler):
     def comicDownloaded(self, comic, filename):
         """Add URL-to-filename mapping into JSON."""
         pageInfo = self.getPageInfo(comic.scraper, comic.referrer)
+        # If there's already an image for this page start keeping track of their order
+        if len(pageInfo['images'].keys()) == 1:
+            pageInfo['imagesOrder'] = [pageInfo['images'].keys()[0]]
+        if 'imagesOrder' in pageInfo.keys():
+            pageInfo['imagesOrder'].append(comic.url)
         pageInfo['images'][comic.url] = os.path.basename(filename)

     def comicPageLink(self, scraper, url, prevUrl):

View file

@@ -27,7 +27,7 @@ except ImportError:
 from . import loader, configuration, languages
 from .util import (get_page, makeSequence, get_system_uid, urlopen,
                    unescape, tagre, normaliseURL, prettyMatcherList,
-                   requests_session)
+                   requests_session, uniq)
 from .comic import ComicStrip
 from .output import out
 from .events import getHandler
@@ -137,12 +137,12 @@ class Scraper(object):
         # map modifier function on image URLs
         imageUrls = [self.imageUrlModifier(x, data) for x in imageUrls]
         # remove duplicate URLs
-        imageUrls = set(imageUrls)
+        imageUrls = uniq(imageUrls)
         if len(imageUrls) > 1 and not self.multipleImagesPerStrip:
             out.warn(
                 u"Found %d images instead of 1 at %s with expressions %s" %
                 (len(imageUrls), url, prettyMatcherList(self.imageSearch)))
-            image = sorted(imageUrls)[0]
+            image = imageUrls[0]
             out.warn(u"Choosing image %s" % image)
             imageUrls = (image,)
         elif not imageUrls:

View file

@@ -522,3 +522,12 @@ def strlimit(s, length=72):
     if length == 0:
         return ""
     return "%s..." % s[:length]
def uniq(input):
    """Return a new list with duplicates removed from *input*, preserving
    the order in which each item first appeared.

    Items must be hashable (the call sites pass lists of URL strings).
    Uses a side set for membership tests so the whole pass is O(n),
    instead of the O(n^2) behaviour of testing ``item not in output``
    against the growing result list.
    """
    seen = set()
    output = []
    for item in input:
        if item not in seen:
            seen.add(item)
            output.append(item)
    return output

View file

@@ -57,6 +57,10 @@ def create_symlinks(d):
     order = []
     work = latest
     while work in data["pages"]:
-        order.extend(data["pages"][work]["images"].values())
+        if "imagesOrder" in data["pages"][work].keys():
+            for url in reversed(data["pages"][work]["imagesOrder"]):
+                order.append(data["pages"][work]["images"][url])
+        else:
+            order.extend(data["pages"][work]["images"].values())
         if "prev" in data["pages"][work]:
             work = data["pages"][work]["prev"]