Fix update scripts after htmlparser merge.

This commit is contained in:
Tobias Gruetzmacher 2015-04-20 19:42:12 +02:00
parent 3b33129e58
commit 64ad5468b9
6 changed files with 8 additions and 8 deletions

View file

@@ -256,7 +256,7 @@ def handle_url(url, session, res):
"""Parse one search result page."""
print("Parsing", url, file=sys.stderr)
try:
-data, baseUrl = getPageContent(url, session)
+data = getPageContent(url, session)
except IOError as msg:
print("ERROR:", msg, file=sys.stderr)
return

View file

@@ -372,7 +372,7 @@ def handle_url(url, session, res):
"""Parse one search result page."""
print("Parsing", url, file=sys.stderr)
try:
-data, baseUrl = getPageContent(url, session)
+data = getPageContent(url, session)
except IOError as msg:
print("ERROR:", msg, file=sys.stderr)
return

View file

@@ -26,7 +26,7 @@ def handle_url(url, session, res):
"""Parse one search result page."""
print("Parsing", url, file=sys.stderr)
try:
-data, baseUrl = getPageContent(url, session)
+data = getPageContent(url, session)
except IOError as msg:
print("ERROR:", msg, file=sys.stderr)
return

View file

@@ -169,7 +169,7 @@ exclude_comics = [
def handle_url(url, session, url_matcher, num_matcher, res):
"""Parse one search result page."""
try:
-data, baseUrl = getPageContent(url, session)
+data = getPageContent(url, session)
except IOError as msg:
print("ERROR:", msg, file=sys.stderr)
return

View file

@@ -57,7 +57,7 @@ def handle_url(url, session, res):
"""Parse one search result page."""
print("Parsing", url, file=sys.stderr)
try:
-data, baseUrl = getPageContent(url, session)
+data = getPageContent(url, session)
except IOError as msg:
print("ERROR:", msg, file=sys.stderr)
return
@@ -88,7 +88,7 @@ def handle_url(url, session, res):
def get_description(url, session):
"""Get comic strip description."""
try:
-data, baseUrl = getPageContent(url, session)
+data = getPageContent(url, session)
except IOError as msg:
print("ERROR:", msg, file=sys.stderr)
return u""

View file

@@ -252,7 +252,7 @@ def handle_url(url, session, res):
"""Parse one search result page."""
print("Parsing", url, file=sys.stderr)
try:
-data, baseUrl = getPageContent(url, session)
+data = getPageContent(url, session)
except IOError as msg:
print("ERROR:", msg, file=sys.stderr)
return
@@ -276,7 +276,7 @@ def handle_url(url, session, res):
# search for url in extra page
print("Getting", page_url)
try:
-data2, baseUrl2 = getPageContent(page_url, session)
+data2 = getPageContent(page_url, session)
except IOError as msg:
print("ERROR:", msg, file=sys.stderr)
return