Mirror of https://github.com/python/cpython.git, synced 2025-07-08 03:45:36 +00:00
gh-131525: Remove _HashedSeq wrapper from lru_cache (gh-131922)
parent c535a132e4
commit 0147be09d5
1 changed file with 1 addition and 17 deletions
Lib/functools.py

@@ -516,22 +516,6 @@ def _unwrap_partialmethod(func):
 
 _CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])
 
-class _HashedSeq(list):
-    """ This class guarantees that hash() will be called no more than once
-        per element. This is important because the lru_cache() will hash
-        the key multiple times on a cache miss.
-
-    """
-
-    __slots__ = 'hashvalue'
-
-    def __init__(self, tup, hash=hash):
-        self[:] = tup
-        self.hashvalue = hash(tup)
-
-    def __hash__(self):
-        return self.hashvalue
-
 def _make_key(args, kwds, typed,
              kwd_mark = (object(),),
              fasttypes = {int, str},
@@ -561,7 +545,7 @@ def _make_key(args, kwds, typed,
         key += tuple(type(v) for v in kwds.values())
     elif len(key) == 1 and type(key[0]) in fasttypes:
         return key[0]
-    return _HashedSeq(key)
+    return key
 
 def lru_cache(maxsize=128, typed=False):
     """Least-recently-used cache decorator.