mirror of https://github.com/python/cpython.git
synced 2025-11-17 17:46:45 +00:00

Add example, tighten text, and minor clean-ups.

This commit is contained in:
parent d01df46848
commit 7496b4171e

4 changed files with 51 additions and 38 deletions
@@ -42,40 +42,52 @@ The :mod:`functools` module defines the following functions:
 
    .. versionadded:: 3.2
 
 
-.. decorator:: lru_cache(maxsize)
+.. decorator:: lru_cache(maxsize=100)
 
    Decorator to wrap a function with a memoizing callable that saves up to the
    *maxsize* most recent calls. It can save time when an expensive or I/O bound
    function is periodically called with the same arguments.
 
-   The *maxsize* parameter defaults to 100. Since a dictionary is used to cache
-   results, the positional and keyword arguments to the function must be
-   hashable.
+   Since a dictionary is used to cache results, the positional and keyword
+   arguments to the function must be hashable.
 
-   The wrapped function is instrumented with a :attr:`cache_info` attribute that
-   can be called to retrieve a named tuple with the following fields:
-
-   - :attr:`maxsize`: maximum cache size (as set by the *maxsize* parameter)
-   - :attr:`size`: current number of entries in the cache
-   - :attr:`hits`: number of successful cache lookups
-   - :attr:`misses`: number of unsuccessful cache lookups.
-
-   These statistics are helpful for tuning the *maxsize* parameter and for measuring
-   the effectiveness of the cache.
-
-   The wrapped function also has a :attr:`cache_clear` attribute which can be
-   called (with no arguments) to clear the cache.
+   To help measure the effectiveness of the cache and tune the *maxsize*
+   parameter, the wrapped function is instrumented with a :func:`cache_info`
+   function that returns a :term:`named tuple` showing *hits*, *misses*,
+   *maxsize* and *currsize*.
+
+   The decorator also provides a :func:`cache_clear` function for clearing or
+   invalidating the cache.
 
    The original underlying function is accessible through the
-   :attr:`__wrapped__` attribute. This allows introspection, bypassing
-   the cache, or rewrapping the function with a different caching tool.
+   :attr:`__wrapped__` attribute. This is useful for introspection, for
+   bypassing the cache, or for rewrapping the function with a different cache.
 
    A `LRU (least recently used) cache
-   <http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used>`_
-   works best when more recent calls are the best predictors of upcoming calls
-   (for example, the most popular articles on a news server tend to
-   change each day). The cache's size limit assurs that caching does not
-   grow without bound on long-running processes such as web servers.
+   <http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used>`_ works
+   best when more recent calls are the best predictors of upcoming calls (for
+   example, the most popular articles on a news server tend to change daily).
+   The cache's size limit assures that the cache does not grow without bound on
+   long-running processes such as web servers.
+
+   Example -- Caching static web content::
+
+        @functools.lru_cache(maxsize=20)
+        def get_pep(num):
+            'Retrieve text of a Python Enhancement Proposal'
+            resource = 'http://www.python.org/dev/peps/pep-%04d/' % num
+            try:
+                with urllib.request.urlopen(resource) as s:
+                    return s.read()
+            except urllib.error.HTTPError:
+                return 'Not Found'
+
+        >>> for n in 8, 290, 308, 320, 8, 218, 320, 279, 289, 320, 9991:
+        ...     pep = get_pep(n)
+        ...     print(n, len(pep))
+
+        >>> print(get_pep.cache_info())
+        CacheInfo(hits=3, misses=8, maxsize=20, currsize=8)
 
    .. versionadded:: 3.2
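As a quick illustration of the documented behaviour above (a minimal sketch, not part of the commit; the ``count_vowels`` function is made up for the demo): ``cache_info()`` reports the four fields in the new order, ``cache_clear()`` resets both the cache and the statistics, and unhashable arguments are rejected because the cache keys them in a dictionary::

    import functools

    @functools.lru_cache(maxsize=32)
    def count_vowels(text):
        'Count vowels in a string (arguments must be hashable).'
        return sum(text.count(v) for v in 'aeiou')

    count_vowels('functools')           # miss
    count_vowels('functools')           # hit
    print(count_vowels.cache_info())    # CacheInfo(hits=1, misses=1, maxsize=32, currsize=1)

    count_vowels.cache_clear()          # empty the cache and reset the statistics
    print(count_vowels.cache_info())    # CacheInfo(hits=0, misses=0, maxsize=32, currsize=0)

    # count_vowels(['a', 'list'])       # would raise TypeError: unhashable type: 'list'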
@@ -332,13 +332,14 @@ New, Improved, and Deprecated Modules
           c.execute('SELECT phonenumber FROM phonelist WHERE name=?', (name,))
           return c.fetchone()[0]
 
-To help with choosing an effective cache size, the wrapped function is
-instrumented with info function:
-
   >>> for name in user_requests:
-  ...     get_phone_number(name)
+  ...     get_phone_number(name)        # cached lookup
+
+To help with choosing an effective cache size, the wrapped function is
+instrumented for tracking cache statistics:
 
   >>> get_phone_number.cache_info()
-  CacheInfo(maxsize=300, size=300, hits=4805, misses=980)
+  CacheInfo(hits=4805, misses=980, maxsize=300, currsize=300)
 
 If the phonelist table gets updated, the outdated contents of the cache can be
 cleared with:
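The hunk above stops at ``cleared with:``. The following is a self-contained sketch of the same pattern (the in-memory database setup is invented so the snippet runs on its own; only the ``phonelist`` query mirrors the excerpt), showing a cached lookup being invalidated with ``cache_clear()`` after the table changes::

    import sqlite3
    import functools

    # Invented setup so the sketch is runnable; only the phonelist query
    # itself mirrors the example above.
    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE phonelist (name TEXT PRIMARY KEY, phonenumber TEXT)')
    conn.execute("INSERT INTO phonelist VALUES ('guido', '555-0100')")

    @functools.lru_cache(maxsize=300)
    def get_phone_number(name):
        c = conn.cursor()
        c.execute('SELECT phonenumber FROM phonelist WHERE name=?', (name,))
        return c.fetchone()[0]

    print(get_phone_number('guido'))       # miss: reads the database
    print(get_phone_number('guido'))       # hit:  served from the cache
    print(get_phone_number.cache_info())   # CacheInfo(hits=1, misses=1, maxsize=300, currsize=1)

    # When the table changes, drop the outdated cached results.
    conn.execute("UPDATE phonelist SET phonenumber = '555-0199' WHERE name = 'guido'")
    get_phone_number.cache_clear()
    print(get_phone_number('guido'))       # miss again: returns the fresh value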
@@ -114,7 +114,7 @@ def cmp_to_key(mycmp):
             raise TypeError('hash not implemented')
     return K
 
-_CacheInfo = namedtuple("CacheInfo", "maxsize, size, hits, misses")
+_CacheInfo = namedtuple("CacheInfo", "hits misses maxsize currsize")
 
 def lru_cache(maxsize=100):
     """Least-recently-used cache decorator.
@@ -166,7 +166,7 @@ def lru_cache(maxsize=100):
         def cache_info():
             """Report cache statistics"""
             with lock:
-                return _CacheInfo(maxsize, len(cache), hits, misses)
+                return _CacheInfo(hits, misses, maxsize, len(cache))
 
         def cache_clear():
             """Clear the cache and cache statistics"""
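The change to the ``functools`` implementation is just a reordering of the named-tuple fields, but it affects every caller that constructs or unpacks the result positionally, which is why the tests below change in lockstep. A small standalone sketch (not the stdlib code) of how the reordered ``CacheInfo`` behaves::

    from collections import namedtuple

    # Same field order as the new definition above, rebuilt here standalone.
    _CacheInfo = namedtuple("CacheInfo", "hits misses maxsize currsize")

    info = _CacheInfo(3, 8, 20, 8)     # positional, as cache_info() now builds it
    print(info)                        # CacheInfo(hits=3, misses=8, maxsize=20, currsize=8)

    # Positional unpacking must follow the same order...
    hits, misses, maxsize, currsize = info
    # ...while attribute access is unaffected by the reordering.
    assert (info.hits, info.currsize) == (hits, currsize)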
@@ -501,7 +501,7 @@ class TestLRU(unittest.TestCase):
         def orig(x, y):
             return 3*x+y
         f = functools.lru_cache(maxsize=20)(orig)
-        maxsize, currsize, hits, misses = f.cache_info()
+        hits, misses, maxsize, currsize = f.cache_info()
         self.assertEqual(maxsize, 20)
         self.assertEqual(currsize, 0)
         self.assertEqual(hits, 0)
@@ -513,18 +513,18 @@ class TestLRU(unittest.TestCase):
             actual = f(x, y)
             expected = orig(x, y)
             self.assertEqual(actual, expected)
-        maxsize, currsize, hits, misses = f.cache_info()
+        hits, misses, maxsize, currsize = f.cache_info()
         self.assertTrue(hits > misses)
         self.assertEqual(hits + misses, 1000)
         self.assertEqual(currsize, 20)
 
         f.cache_clear()      # test clearing
-        maxsize, currsize, hits, misses = f.cache_info()
+        hits, misses, maxsize, currsize = f.cache_info()
         self.assertEqual(hits, 0)
         self.assertEqual(misses, 0)
         self.assertEqual(currsize, 0)
         f(x, y)
-        maxsize, currsize, hits, misses = f.cache_info()
+        hits, misses, maxsize, currsize = f.cache_info()
         self.assertEqual(hits, 0)
         self.assertEqual(misses, 1)
         self.assertEqual(currsize, 1)
@@ -532,7 +532,7 @@ class TestLRU(unittest.TestCase):
         # Test bypassing the cache
         self.assertIs(f.__wrapped__, orig)
         f.__wrapped__(x, y)
-        maxsize, currsize, hits, misses = f.cache_info()
+        hits, misses, maxsize, currsize = f.cache_info()
         self.assertEqual(hits, 0)
         self.assertEqual(misses, 1)
         self.assertEqual(currsize, 1)
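The hunk above checks that calling the original function through ``__wrapped__`` leaves the cache statistics untouched. A minimal standalone sketch of that bypass behaviour (the ``double`` function is made up for the demo)::

    import functools

    @functools.lru_cache(maxsize=20)
    def double(x):
        return 2 * x

    double(21)                      # goes through the cache: one miss
    double.__wrapped__(21)          # bypasses the cache entirely
    print(double.cache_info())      # CacheInfo(hits=0, misses=1, maxsize=20, currsize=1)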
@@ -548,7 +548,7 @@ class TestLRU(unittest.TestCase):
         for i in range(5):
             self.assertEqual(f(), 20)
         self.assertEqual(f_cnt, 5)
-        maxsize, currsize, hits, misses = f.cache_info()
+        hits, misses, maxsize, currsize = f.cache_info()
         self.assertEqual(hits, 0)
         self.assertEqual(misses, 5)
         self.assertEqual(currsize, 0)
@@ -564,7 +564,7 @@ class TestLRU(unittest.TestCase):
         for i in range(5):
             self.assertEqual(f(), 20)
         self.assertEqual(f_cnt, 1)
-        maxsize, currsize, hits, misses = f.cache_info()
+        hits, misses, maxsize, currsize = f.cache_info()
         self.assertEqual(hits, 4)
         self.assertEqual(misses, 1)
         self.assertEqual(currsize, 1)
@@ -581,7 +581,7 @@ class TestLRU(unittest.TestCase):
             # *  *  *  *
             self.assertEqual(f(x), x*10)
         self.assertEqual(f_cnt, 4)
-        maxsize, currsize, hits, misses = f.cache_info()
+        hits, misses, maxsize, currsize = f.cache_info()
         self.assertEqual(hits, 12)
         self.assertEqual(misses, 4)
         self.assertEqual(currsize, 2)