From 6a2f57b13281c1b6f5ee65762c90046cd895378f Mon Sep 17 00:00:00 2001
From: Bastian Kleineidam
Date: Wed, 19 Dec 2012 20:43:18 +0100
Subject: [PATCH] Support requests module >= 1.0

---
 doc/changelog.txt |  7 +++++++
 dosagelib/util.py | 23 +++++++++++++++++------
 2 files changed, 24 insertions(+), 6 deletions(-)

diff --git a/doc/changelog.txt b/doc/changelog.txt
index e13bde553..5a6996392 100644
--- a/doc/changelog.txt
+++ b/doc/changelog.txt
@@ -1,3 +1,10 @@
+Dosage 1.8 (released 19.12.2012)
+
+Changes:
+- comics: Add compatibility to requests module >= 1.0.
+- comics: Updated the comic list with the generator scripts.
+
+
 Dosage 1.7 (released 18.12.2012)
 
 Features:
diff --git a/dosagelib/util.py b/dosagelib/util.py
index bef619c4e..bb30be6e0 100644
--- a/dosagelib/util.py
+++ b/dosagelib/util.py
@@ -27,9 +27,16 @@ MaxContentBytes = 1024 * 1024 * 2 # 2 MB
 # Maximum content size for images
 MaxImageBytes = 1024 * 1024 * 20 # 20 MB
 
+# Default number of retries
+MaxRetries = 3
+
 # Default connection timeout
 ConnectionTimeoutSecs = 60
 
+if hasattr(requests, 'adapters'):
+    # requests >= 1.0
+    requests.adapters.DEFAULT_RETRIES = MaxRetries
+
 def tagre(tag, attribute, value, quote='"', before="", after=""):
     """Return a regular expression matching the given HTML tag, attribute
     and value. It matches the tag and attribute names case insensitive,
@@ -218,21 +225,25 @@ def get_robotstxt_parser(url):
     return rp
 
 
-def urlopen(url, referrer=None, retries=3, retry_wait_seconds=5, max_content_bytes=None,
+def urlopen(url, referrer=None, max_content_bytes=None,
             timeout=ConnectionTimeoutSecs, session=None, raise_for_status=True):
     """Open an URL and return the response object."""
     out.debug('Open URL %s' % url)
-    assert retries >= 0, 'invalid retry value %r' % retries
-    assert retry_wait_seconds > 0, 'invalid retry seconds value %r' % retry_wait_seconds
     headers = {'User-Agent': UserAgent}
     if referrer:
         headers['Referer'] = referrer
-    config = {"max_retries": retries}
     if session is None:
         session = requests
+    kwargs = {
+        "headers": headers,
+        "timeout": timeout,
+    }
+    if not hasattr(requests, 'adapters'):
+        # requests << 1.0
+        kwargs["prefetch"] = False
+        kwargs["config"] = {"max_retries": MaxRetries}
     try:
-        req = session.get(url, headers=headers, config=config,
-            prefetch=False, timeout=timeout)
+        req = session.get(url, **kwargs)
         check_content_size(url, req.headers, max_content_bytes)
         if raise_for_status:
             req.raise_for_status()
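
Note: the whole patch hinges on a single feature test. requests >= 1.0 moved
retry handling into the requests.adapters module, so hasattr(requests,
'adapters') distinguishes the new API (a module-wide DEFAULT_RETRIES, set in
the first util.py hunk) from the old one (per-request "prefetch" and "config"
keywords, removed from urlopen in the second hunk). A minimal standalone
sketch of that shim follows, assuming only the calls shown in the hunks
above; the URL, timeout value, and the "extra" variable name are
illustrative, not part of the patch:

    import requests

    MaxRetries = 3  # same default the patch introduces

    if hasattr(requests, 'adapters'):
        # requests >= 1.0: retries are a module-wide adapter default
        requests.adapters.DEFAULT_RETRIES = MaxRetries
        extra = {}
    else:
        # requests < 1.0: retries and prefetch are per-request options
        extra = {"prefetch": False, "config": {"max_retries": MaxRetries}}

    # Illustrative call; urlopen() in the patch builds its kwargs the same way.
    response = requests.get("http://example.com", timeout=60, **extra)
    response.raise_for_status()

Setting the global default rather than passing max_retries per request keeps
the urlopen() signature identical under both requests versions, which is why
the retries and retry_wait_seconds parameters could be dropped.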