remove ignore_headers argument from get_cache_key

Ramiro Juan Nocelli 2025-10-14 19:13:05 +02:00
parent 951d78a5d2
commit b530fd299c
2 changed files with 48 additions and 38 deletions


@@ -376,17 +376,15 @@ def _generate_cache_header_key(key_prefix, request):
     return _i18n_cache_key_suffix(request, cache_key)
 
 
-def get_cache_key(
-    request, key_prefix=None, method="GET", cache=None, ignore_headers=False
-):
+def get_cache_key(request, key_prefix=None, method="GET", cache=None):
     """
     Return a cache key based on the request URL and query. It can be used
     in the request phase because it pulls the list of headers to take into
     account from the global URL registry and uses those to build a cache key
     to check against.
-    If there isn't a headerlist stored and `ignore_headers` argument is False,
-    return None, indicating that the page needs to be rebuilt.
+    If there is no headerlist stored, the page needs to be rebuilt, so this
+    function returns ``None``.
     """
     if key_prefix is None:
         key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
@@ -394,8 +392,8 @@ def get_cache_key(
     if cache is None:
         cache = caches[settings.CACHE_MIDDLEWARE_ALIAS]
     headerlist = cache.get(cache_key)
-    if headerlist is not None or ignore_headers:
-        return _generate_cache_key(request, method, headerlist or [], key_prefix)
+    if headerlist is not None:
+        return _generate_cache_key(request, method, headerlist, key_prefix)
     else:
         return None
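
For orientation (not part of the diff): a minimal sketch of the two-phase flow that the simplified signature relies on, assuming the helpers are importable as in stock Django (django.utils.cache) and that a cache backend is configured; the URL and key prefix are illustrative.

from django.core.cache import cache
from django.http import HttpResponse
from django.test import RequestFactory
from django.utils.cache import get_cache_key, learn_cache_key

request = RequestFactory().get("/view/")

# Request phase: nothing has been cached yet, so there is no stored
# headerlist and get_cache_key() now simply returns None.
assert get_cache_key(request, key_prefix="", cache=cache) is None

# Response phase: learn_cache_key() stores the Vary header list for this URL.
response = HttpResponse("Hello World")
learn_cache_key(request, response, cache_timeout=10, key_prefix="", cache=cache)

# Afterwards the request-phase lookup can rebuild the response cache key.
assert get_cache_key(request, key_prefix="", cache=cache) is not None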
@@ -448,40 +446,32 @@ def _to_tuple(s):
     return t[0].lower(), True
 
 
-def invalidate_view_cache(
-    path=None, request=None, vary_headers=None, key_prefix=None, cache=None
-):
+def invalidate_view_cache(path=None, request=None, key_prefix=None, cache=None):
     """
-    This function first creates a fake WSGIRequest to compute the cache key.
-    The key looks like:
-    views.decorators.cache.cache_page.key_prefix.GET.0fcb3cd9d5b34c8fe83f615913d8509b.c4ca4238a0b923820dcc509a6f75849b.en-us.UTC
-    The first hash corresponds to the full url (including query params),
-    the second to the header values
-    vary_headers should be a dict of every header used for this particular view
-    In local environment, we have two defined renderers (default of DRF),
-    thus DRF adds `Accept` to the Vary headers
-    either `path` or `request` arguments should be passed;
-    if both are passed `path` will be ignored
-    Note: If LocaleMiddleware is used,
-    we'll need to use the same language code as the one in the cached request
+    Delete a view cache key based on either a relative URL (``path``)
+    or a request object (``request``).
+
+    The cache key is reconstructed in two steps:
+    1. A headers cache key is built using the absolute URL,
+       key prefix, and locale code.
+    2. A response cache key is then built using the absolute URL,
+       key prefix, HTTP method, and recovered headers.
+
+    The ``key_prefix`` must match the value used in the ``cache_page``
+    decorator for the corresponding view.
+    Either the ``path`` or ``request`` parameter must be provided.
+    If both are given, ``path`` is ignored.
     """
     if not request:
         assert path is not None, "either `path` or `request` arguments needed"
         factory = RequestFactory()
         request = factory.get(path)
-        if vary_headers:
-            request.META.update(vary_headers)
     if cache is None:
         cache = caches[settings.CACHE_MIDDLEWARE_ALIAS]
-    cache_key = get_cache_key(
-        request, key_prefix=key_prefix, ignore_headers=True, cache=cache
-    )
+    cache_key = get_cache_key(request, key_prefix=key_prefix, cache=cache)
     if cache_key is None:
         return 0
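
Below is a hedged usage sketch of the updated invalidate_view_cache() signature (not part of the diff). The import path, URL, and view wiring are assumptions; the essential point is that key_prefix must match the prefix given to the cache_page() decorator, and that Vary headers no longer need to be supplied by the caller.

# The import path is hypothetical; import from wherever invalidate_view_cache
# is actually defined in the project.
from myproject.utils.cache import invalidate_view_cache

# Invalidate by relative URL; key_prefix must match the cache_page() decorator
# on the view (here the empty-string prefix used in the tests below).
invalidate_view_cache(path="/view/", key_prefix="")

# Or invalidate from an existing request object; per the code above, the call
# returns 0 when no cached key can be found for the request.
def purge_view_cache(request):
    return invalidate_view_cache(request=request, key_prefix="")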

tests/cache/tests.py vendored

@@ -2632,9 +2632,7 @@ class CacheMiddlewareTest(SimpleTestCase):
         view = cache_page(10)(hello_world_view)
         request = self.factory.get("/view/")
         _ = view(request, "0")
-        cache_key = get_cache_key(
-            request=request, key_prefix="", ignore_headers=True, cache=cache
-        )
+        cache_key = get_cache_key(request=request, key_prefix="", cache=cache)
         cached_response = cache.get(cache_key)
         # Verify request.content has been chached
@@ -2653,9 +2651,7 @@ class CacheMiddlewareTest(SimpleTestCase):
         path = "/view/"
         request = self.factory.get(path)
         _ = view(request, "0")
-        cache_key = get_cache_key(
-            request=request, key_prefix="", ignore_headers=True, cache=cache
-        )
+        cache_key = get_cache_key(request=request, key_prefix="", cache=cache)
         cached_response = cache.get(cache_key)
         # Verify request.content has been chached
@@ -2673,7 +2669,7 @@ class CacheMiddlewareTest(SimpleTestCase):
         # Cache view and inject Vary headers to Response object
         view = cache_page(10, key_prefix="")(
-            vary_on_headers("Accept-Encoding")(hello_world_view)
+            vary_on_headers("Accept-Encoding", "Accept")(hello_world_view)
         )
         path = "/view/"
         request = self.factory.get(path)
@@ -2682,9 +2678,7 @@ class CacheMiddlewareTest(SimpleTestCase):
         # Check response headers
         self.assertTrue(response.has_header("Vary"))
-        cache_key = get_cache_key(
-            request=request, key_prefix="", ignore_headers=False, cache=cache
-        )
+        cache_key = get_cache_key(request=request, key_prefix="", cache=cache)
         cached_response = cache.get(cache_key)
         # Verify request.content has been chached
@@ -2697,6 +2691,32 @@ class CacheMiddlewareTest(SimpleTestCase):
         # Confirm key/value has been deleted from cache
         self.assertIsNone(cached_response)
 
+    def test_cache_key_prefix_missmatch(self):
+        # Wrap the view with the cache_page decorator (no key_prefix specified)
+        view = cache_page(10)(hello_world_view)
+        path = "/view/"
+        request = self.factory.get(path)
+        _ = view(request, "0")
+
+        # Attempt to retrieve the cache key without specifying key_prefix
+        cache_key = get_cache_key(request=request, cache=cache)
+
+        # Because get_cache_key defaults to using
+        # settings.CACHE_MIDDLEWARE_KEY_PREFIX when key_prefix is None,
+        # this should not match the cached key
+        self.assertIsNone(cache_key)
+
+        # Try again, explicitly passing the default cache_page key_prefix
+        # (empty string)
+        cache_key = get_cache_key(request=request, cache=cache, key_prefix="")
+
+        # The key should now be found
+        self.assertIsNotNone(cache_key)
+
+        # Confirm that the cached response content matches the view output
+        cached_response = cache.get(cache_key)
+        self.assertEqual(cached_response.content, b"Hello World 0")
+
     def test_view_decorator(self):
         # decorate the same view with different cache decorators
         default_view = cache_page(3)(hello_world_view)