Replace custom @memoized with stdlib @lru_cache.

This commit is contained in:
Tobias Gruetzmacher 2016-10-29 00:45:43 +02:00
parent 9a6a310b76
commit a02660a7d3
3 changed files with 7 additions and 32 deletions

View file

@@ -1,30 +0,0 @@
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2012-2014 Bastian Kleineidam
class memoized (object):
    """Decorator that caches a function's return value each time it is called.

    If called later with the same arguments (positional and keyword), the
    cached value is returned instead of re-evaluating the function. Calls
    with unhashable arguments are not cached but still succeed.
    """

    def __init__(self, func):
        """Store the wrapped function and initialize an empty cache."""
        self.func = func
        self.cache = {}

    def __call__(self, *args, **kwargs):
        """Look up and return the cached result if present; otherwise call
        the stored function and cache its result.

        The cache key includes keyword arguments (sorted by name), so calls
        differing only in kwargs get distinct entries.
        """
        # Sort kwargs so the key is independent of keyword order.
        key = (args, tuple(sorted(kwargs.items())))
        try:
            return self.cache[key]
        except KeyError:
            self.cache[key] = value = self.func(*args, **kwargs)
            return value
        except TypeError:
            # Uncachable -- for instance, passing a list as an argument.
            # Better to not cache than to blow up entirely.
            return self.func(*args, **kwargs)

    def __repr__(self):
        """Return the wrapped function's docstring, or a fallback repr if
        the function has none (repr must always return a str)."""
        return self.func.__doc__ or "memoized(%s)" % (self.func.__name__,)

View file

@@ -21,7 +21,11 @@ import subprocess
from six.moves.html_parser import HTMLParser
import six
from .decorators import memoized
try:
from functools import lru_cache
except ImportError:
from backports.functools_lru_cache import lru_cache
from .output import out
from .configuration import UserAgent, AppName, App, SupportUrl
@@ -247,7 +251,7 @@ def check_robotstxt(url, session):
raise IOError("%s is disallowed by %s" % (url, roboturl))
@memoized
@lru_cache()
def get_robotstxt_parser(url, session=None):
"""Get a RobotFileParser for the given robots.txt URL."""
rp = RobotFileParser()

View file

@@ -4,3 +4,4 @@ pbr
requests>=2.0
six
backports.shutil_get_terminal_size; python_version<'3.3'
backports.functools_lru_cache; python_version<'3.2'