# Source code for Aeros.patches.flask_caching.Cache

from flask_caching import Cache as OriginalCache
from flask_caching import *


class Cache(OriginalCache):
    """Drop-in replacement for :class:`flask_caching.Cache` with Aeros defaults.

    Builds the flask-caching configuration dict up front so the cache can
    be constructed before an app is attached.  Only the three ``CACHE_*``
    keys set in ``__init__`` are populated; everything else is left to
    flask-caching's defaults.
    """

    def __init__(self, timeout: int = 60 * 60, threshold: int = 100, *args, **kwargs):
        """Create the cache with an explicit default timeout and size limit.

        :param timeout: default per-entry lifetime in seconds (1 hour).
        :param threshold: maximum number of entries the backend keeps.

        Remaining positional/keyword arguments are forwarded to
        :class:`flask_caching.Cache` (e.g. the ``app`` instance).
        """
        # A fresh dict is passed per instance, so the self.config mutations
        # below never leak state between Cache instances.
        super().__init__(*args, config={}, **kwargs)
        # NOTE(review): "null" is flask-caching's no-op backend, which makes
        # the threshold/timeout settings inert unless a caller switches
        # CACHE_TYPE later via self.config — confirm this is intended.
        self.config["CACHE_TYPE"] = "null"
        self.config["CACHE_THRESHOLD"] = threshold
        self.config["CACHE_DEFAULT_TIMEOUT"] = timeout
    def cached(
        self,
        timeout=None,
        key_prefix="view/%s",
        unless=None,
        forced_update=None,
        response_filter=None,
        query_string=False,
        hash_method=hashlib.md5,
        cache_none=False,
    ):
        """Decorator that caches the awaited result of the wrapped function.

        This is Aeros' async-aware patch of
        :meth:`flask_caching.Cache.cached`: the produced wrapper is a
        coroutine and ``await``s the wrapped function, so it works with
        async view functions.

        Unlike upstream flask-caching, the cache key used at call time is
        ``"{}{}{}".format(f.__name__, args, kwargs)`` — the function name
        plus the repr of its call arguments — NOT
        ``key_prefix % request.path``.  ``key_prefix`` and ``query_string``
        only influence the ``make_cache_key`` attribute attached to the
        wrapper; this patched runtime path never calls it.

        :param timeout: seconds to cache for; ``None`` uses the backend's
            default.  Exposed (and overridable after decoration) on the
            wrapper as ``decorated_function.cache_timeout``.
        :param key_prefix: prefix string (``%s`` template) or zero-arg
            callable; used only by the attached ``make_cache_key`` helper.
        :param unless: callable; when truthy the cache is bypassed entirely
            (checked via ``self._bypass_cache``).
        :param forced_update: callable; when it returns ``True`` the cached
            value is ignored and recomputed regardless of expiry.  May
            optionally accept the call's ``*args, **kwargs`` (detected via
            ``wants_args``).
        :param response_filter: callable given the computed value; if it
            returns falsy the value is not stored.  ``None`` stores always.
        :param query_string: accepted for upstream signature compatibility;
            unused by this patched implementation (see
            ``_make_cache_key_query_string`` below).
        :param hash_method: hash constructor (default :func:`hashlib.md5`)
            used by the query-string key helper.
        :param cache_none: when ``True``, an extra ``cache.has`` check
            distinguishes "key missing" from "cached value is None".
            Racy under concurrency; see the inline comment.
        """

        def decorator(f):
            @functools.wraps(f)
            async def decorated_function(*args, **kwargs):
                # Bypass the cache entirely when `unless` says so.
                if self._bypass_cache(unless, f, *args, **kwargs):
                    return await f(*args, **kwargs)

                try:
                    # Runtime key: function name + repr of the call args.
                    # NOTE(review): kwargs insertion order affects the key,
                    # so f(a=1, b=2) and f(b=2, a=1) cache separately —
                    # confirm that is acceptable for callers.
                    cache_key = "{}{}{}".format(f.__name__, args, kwargs)

                    if (
                        callable(forced_update)
                        and (
                            forced_update(*args, **kwargs)
                            if wants_args(forced_update)
                            else forced_update()
                        )
                        is True
                    ):
                        # Forced refresh: behave as if the key were absent.
                        rv = None
                        found = False
                    else:
                        rv = self.cache.get(cache_key)
                        found = True

                        # cache.get() returning None is ambiguous: either
                        # the key is not in the cache, or None is the
                        # actual cached value.
                        if rv is None:
                            # With cache_none=False we skip the existence
                            # check entirely: has() after get() can yield a
                            # false positive if a concurrent call cached
                            # the key between the two steps, which would
                            # make us return None when we shouldn't.
                            if not cache_none:
                                found = False
                            else:
                                found = self.cache.has(cache_key)
                except Exception:
                    # Backend failures degrade to an uncached call, but
                    # surface immediately in debug mode.
                    if self.app.debug:
                        raise
                    logger.exception("Exception possibly due to cache backend.")
                    return await f(*args, **kwargs)

                if not found:
                    rv = await f(*args, **kwargs)

                    # Store only when the filter accepts the value.
                    if response_filter is None or response_filter(rv):
                        try:
                            self.cache.set(
                                cache_key,
                                rv,
                                # Read from the wrapper attribute so the
                                # timeout can be changed after decoration.
                                timeout=decorated_function.cache_timeout,
                            )
                        except Exception:
                            if self.app.debug:
                                raise
                            logger.exception(
                                "Exception possibly due to cache backend."
                            )
                return rv

            def make_cache_key(*args, **kwargs):
                # Convert non-keyword arguments (which is the way
                # `make_cache_key` expects them) to keyword arguments
                # (the way `url_for` expects them).
                argspec_args = inspect.getfullargspec(f).args

                for arg_name, arg in zip(argspec_args, args):
                    kwargs[arg_name] = arg

                return _make_cache_key(args, kwargs, use_request=False)

            def _make_cache_key_query_string():
                """Build a cache key that is stable under query-arg reorder.

                ``?limit=10&offset=20`` and ``?offset=20&limit=10`` hash to
                the same key.  NOTE(review): kept for parity with upstream
                flask-caching but never called by this patched decorator.
                """
                # Sort the (key, value) pairs so identical query args
                # produce the same key regardless of their order.
                args_as_sorted_tuple = tuple(
                    sorted((pair for pair in request.args.items(multi=True)))
                )
                # Hash the sorted tuple; encode to bytes first because the
                # hash function only accepts bytes.
                args_as_bytes = str(args_as_sorted_tuple).encode()
                hashed_args = str(hash_method(args_as_bytes).hexdigest())
                cache_key = request.path + hashed_args

                return cache_key

            def _make_cache_key(args, kwargs, use_request):
                # key_prefix may be: a zero-arg callable, a template
                # containing "%s", or a literal key string.
                if callable(key_prefix):
                    cache_key = key_prefix()
                elif "%s" in key_prefix:
                    if use_request:
                        cache_key = key_prefix % request.path
                    else:
                        # Outside a matching request context, derive the
                        # expected URL for the view via url_for().
                        cache_key = key_prefix % url_for(f.__name__, **kwargs)
                else:
                    cache_key = key_prefix

                return cache_key

            # Expose internals on the wrapper (upstream flask-caching
            # contract): the undecorated function, a writable timeout,
            # and the key builder.
            decorated_function.uncached = f
            decorated_function.cache_timeout = timeout
            decorated_function.make_cache_key = make_cache_key

            return decorated_function

        return decorator