From aec87c8fd1c98ab4c1b81475e23179623cd868ec Mon Sep 17 00:00:00 2001 From: Mahmoud Hashemi Date: Sat, 4 Apr 2015 20:54:20 -0700 Subject: [PATCH] documenting 'cached' decorator --- boltons/cacheutils.py | 15 +++++++++++++++ docs/cacheutils.rst | 14 ++++++++++++++ 2 files changed, 29 insertions(+) diff --git a/boltons/cacheutils.py b/boltons/cacheutils.py index 7e6d2f1..1134e80 100644 --- a/boltons/cacheutils.py +++ b/boltons/cacheutils.py @@ -390,6 +390,21 @@ class CachedFunction(object): def cached(cache, typed=False): + """\ + + Cache any function with the cache instance of your choosing. Note + that the wrapped function should take only `hashable`_ arguments. + + Args: + cache (Mapping): Any :class:`dict`-like object suitable for + use as a cache. Instances of the :class:`LRU` and + :class:`LRI` are good choices. + typed (bool): Whether to factor argument types into the cache + check. Default ``False``; setting to ``True`` causes the + cache keys for ``3`` and ``3.0`` to be considered unequal. + + .. _hashable: https://docs.python.org/2/glossary.html#term-hashable + """ def cached_func_decorator(func): return CachedFunction(func, cache, typed=typed) diff --git a/docs/cacheutils.rst b/docs/cacheutils.rst index fa9014e..8d18441 100644 --- a/docs/cacheutils.rst +++ b/docs/cacheutils.rst @@ -28,3 +28,17 @@ LRU has threadsafety built in. .. autoclass:: boltons.cacheutils.LRU :members: + + + Automatic function caching -------------------------- + Continuing in the theme of cache tunability and experimentation, ``cacheutils`` also offers a way to pluggably cache function return values: the :func:`cached` function decorator. + .. autofunction:: boltons.cacheutils.cached + Similar functionality can be found in Python 3.4's :mod:`functools` module, though it is not made for cache pluggability and does not support sharing the cache object across multiple functions.