gh-133253: making linecache thread-safe (#133305)

Co-authored-by: Sam Gross <colesbury@gmail.com>
Authored by vfdev on 2025-05-09 08:45:16 +02:00, committed by GitHub
parent 6d5a8c2ec1
commit 8054184f9f
3 changed files with 74 additions and 31 deletions

Lib/linecache.py

@@ -33,10 +33,9 @@ def getlines(filename, module_globals=None):
     """Get the lines for a Python source file from the cache.
     Update the cache if it doesn't contain an entry for this file already."""
 
-    if filename in cache:
-        entry = cache[filename]
-        if len(entry) != 1:
-            return cache[filename][2]
+    entry = cache.get(filename, None)
+    if entry is not None and len(entry) != 1:
+        return entry[2]
 
     try:
         return updatecache(filename, module_globals)
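This is the heart of the change: the old code tested "filename in cache" and then indexed the dict again, so a concurrent clearcache() or checkcache() between the two steps could drop the entry and raise KeyError. A single dict.get() reads the entry atomically, and the rest of the function works from the local reference. A minimal sketch of the two patterns (illustration only, not part of the patch; the helper names are made up):

    import linecache

    def lines_for_racy(filename):
        # check-then-act: the entry can vanish between the two dict accesses
        if filename in linecache.cache:
            entry = linecache.cache[filename]   # may raise KeyError if another
            if len(entry) != 1:                 # thread cleared the cache here
                return entry[2]                 # 4-tuple: (size, mtime, lines, fullname)
        return []

    def lines_for_atomic(filename):
        # one atomic lookup; the local reference stays usable even if another
        # thread removes the cache entry right afterwards
        entry = linecache.cache.get(filename, None)
        if entry is not None and len(entry) != 1:
            return entry[2]
        return []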
@@ -56,10 +55,9 @@ def _make_key(code):
 
 def _getlines_from_code(code):
     code_id = _make_key(code)
-    if code_id in _interactive_cache:
-        entry = _interactive_cache[code_id]
-        if len(entry) != 1:
-            return _interactive_cache[code_id][2]
+    entry = _interactive_cache.get(code_id, None)
+    if entry is not None and len(entry) != 1:
+        return entry[2]
     return []
@@ -84,12 +82,8 @@ def checkcache(filename=None):
         filenames = [filename]
 
     for filename in filenames:
-        try:
-            entry = cache[filename]
-        except KeyError:
-            continue
-
-        if len(entry) == 1:
+        entry = cache.get(filename, None)
+        if entry is None or len(entry) == 1:
             # lazy cache entry, leave it lazy.
             continue
         size, mtime, lines, fullname = entry
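checkcache() gets the same treatment: the try/except KeyError and the separate lazy-entry length check collapse into one get() and one combined condition. A toy illustration of the combined test (the entries below are made-up examples of the cache's 1-tuple and 4-tuple formats):

    lazy_entry = (lambda: "x = 1\n",)                 # 1-tuple: lazy, leave it alone
    full_entry = (6, None, ["x = 1\n"], "mod.py")     # 4-tuple: size, mtime, lines, fullname

    for entry in (None, lazy_entry, full_entry):
        if entry is None or len(entry) == 1:
            continue                                  # missing or lazy: nothing to validate
        size, mtime, lines, fullname = entry          # reached only for full entries
        print(size, fullname)                         # -> 6 mod.py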
@@ -125,9 +119,7 @@ def updatecache(filename, module_globals=None):
         # These import can fail if the interpreter is shutting down
         return []
 
-    if filename in cache:
-        if len(cache[filename]) != 1:
-            cache.pop(filename, None)
+    entry = cache.pop(filename, None)
     if _source_unavailable(filename):
         return []
@@ -146,9 +138,12 @@ def updatecache(filename, module_globals=None):
         # Realise a lazy loader based lookup if there is one
         # otherwise try to lookup right now.
-        if lazycache(filename, module_globals):
+        lazy_entry = entry if entry is not None and len(entry) == 1 else None
+        if lazy_entry is None:
+            lazy_entry = _make_lazycache_entry(filename, module_globals)
+        if lazy_entry is not None:
             try:
-                data = cache[filename][0]()
+                data = lazy_entry[0]()
             except (ImportError, OSError):
                 pass
             else:
@@ -156,13 +151,14 @@
                     # No luck, the PEP302 loader cannot find the source
                     # for this module.
                     return []
-                cache[filename] = (
+                entry = (
                     len(data),
                     None,
                     [line + '\n' for line in data.splitlines()],
                     fullname
                 )
-                return cache[filename][2]
+                cache[filename] = entry
+                return entry[2]
 
     # Try looking through the module search path, which is only useful
     # when handling a relative filename.
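Within updatecache() the flow is now: pop the old entry once, reuse it if it was a lazy 1-tuple, otherwise ask the new _make_lazycache_entry() helper for one, and finally publish the finished 4-tuple with a single assignment so readers never observe a half-built entry. A condensed sketch of that flow under simplifying assumptions (update_sketch, make_lazy_entry and read_lines are invented stand-ins, not the module's API):

    def update_sketch(cache, filename, make_lazy_entry, read_lines):
        # one atomic remove-and-return; readers never see a half-built entry
        entry = cache.pop(filename, None)

        lines = read_lines(filename)                    # None if the file is unreadable
        if lines is None:
            # no source on disk: fall back to a lazy loader, reusing a popped
            # lazy 1-tuple if there was one
            lazy_entry = entry if entry is not None and len(entry) == 1 else None
            if lazy_entry is None:
                lazy_entry = make_lazy_entry(filename)
            if lazy_entry is None:
                return []
            data = lazy_entry[0]()                      # realise the lazy loader
            lines = [line + '\n' for line in data.splitlines()]

        entry = (sum(map(len, lines)), None, lines, filename)
        cache[filename] = entry                         # publish the finished 4-tuple once
        return entry[2]

    # toy usage: a lazy entry left behind by lazycache() gets realised
    cache = {"mod.py": (lambda: "a = 1\nb = 2\n",)}
    print(update_sketch(cache, "mod.py",
                        make_lazy_entry=lambda name: None,
                        read_lines=lambda name: None))  # -> ['a = 1\n', 'b = 2\n']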
@@ -211,13 +207,20 @@ def lazycache(filename, module_globals):
     get_source method must be found, the filename must be a cacheable
     filename, and the filename must not be already cached.
     """
-    if filename in cache:
-        if len(cache[filename]) == 1:
-            return True
-        else:
-            return False
+    entry = cache.get(filename, None)
+    if entry is not None:
+        return len(entry) == 1
+
+    lazy_entry = _make_lazycache_entry(filename, module_globals)
+    if lazy_entry is not None:
+        cache[filename] = lazy_entry
+        return True
+    return False
+
+
+def _make_lazycache_entry(filename, module_globals):
     if not filename or (filename.startswith('<') and filename.endswith('>')):
-        return False
+        return None
+
     # Try for a __loader__, if available
     if module_globals and '__name__' in module_globals:
         spec = module_globals.get('__spec__')
@@ -230,9 +233,10 @@ def lazycache(filename, module_globals):
         if name and get_source:
             def get_lines(name=name, *args, **kwargs):
                 return get_source(name, *args, **kwargs)
-            cache[filename] = (get_lines,)
-            return True
-    return False
+            return (get_lines,)
+
+    return None
 
 
 def _register_code(code, string, name):
     entry = (len(string),
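The observable behaviour of lazycache() is unchanged: it still answers True and stores a 1-tuple (get_lines,) entry when a loader with a get_source method is available; the refactor moves the pure lookup into _make_lazycache_entry() so the cache is written in exactly one place. A small usage sketch of the documented API (FakeLoader and the "fake_mod.py" name are invented for illustration):

    import linecache

    class FakeLoader:
        def get_source(self, name):
            return "a = 1\nb = 2\n"

    module_globals = {"__name__": "fake_mod", "__loader__": FakeLoader()}

    # seeds linecache.cache with a lazy (get_lines,) entry; no source is read yet
    print(linecache.lazycache("fake_mod.py", module_globals))   # True

    # getline() realises the lazy entry via updatecache() on first use
    print(linecache.getline("fake_mod.py", 2))                  # 'b = 2\n'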
@@ -245,4 +249,5 @@ def _register_code(code, string, name):
         for const in code.co_consts:
             if isinstance(const, type(code)):
                 stack.append(const)
-    _interactive_cache[_make_key(code)] = entry
+    key = _make_key(code)
+    _interactive_cache[key] = entry

Lib/test/test_linecache.py

@@ -4,10 +4,12 @@ import linecache
 import unittest
 import os.path
 import tempfile
+import threading
 import tokenize
 from importlib.machinery import ModuleSpec
 from test import support
 from test.support import os_helper
+from test.support import threading_helper
 from test.support.script_helper import assert_python_ok
@@ -374,5 +376,40 @@
         self.assertIn(self.unchanged_file, linecache.cache)
 
 
+class MultiThreadingTest(unittest.TestCase):
+    @threading_helper.reap_threads
+    @threading_helper.requires_working_threading()
+    def test_read_write_safety(self):
+        with tempfile.TemporaryDirectory() as tmpdirname:
+            filenames = []
+            for i in range(10):
+                name = os.path.join(tmpdirname, f"test_{i}.py")
+                with open(name, "w") as h:
+                    h.write("import time\n")
+                    h.write("import system\n")
+                filenames.append(name)
+
+            def linecache_get_line(b):
+                b.wait()
+                for _ in range(100):
+                    for name in filenames:
+                        linecache.getline(name, 1)
+
+            def check(funcs):
+                barrier = threading.Barrier(len(funcs))
+                threads = []
+
+                for func in funcs:
+                    thread = threading.Thread(target=func, args=(barrier,))
+                    threads.append(thread)
+
+                with threading_helper.start_threads(threads):
+                    pass
+
+            check([linecache_get_line] * 20)
+
+
 if __name__ == "__main__":
     unittest.main()
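The new test creates ten small files and lets twenty threads hammer linecache.getline() on them after meeting at a barrier. A standalone sketch of the same kind of stress outside the test suite (no test.support helpers; it mixes the getline() reader path with the checkcache() writer path, and the names and counts here are arbitrary):

    import linecache
    import os
    import tempfile
    import threading

    with tempfile.TemporaryDirectory() as tmpdir:
        path = os.path.join(tmpdir, "mod.py")
        with open(path, "w") as fh:
            fh.write("import time\n")

        barrier = threading.Barrier(8)

        def worker():
            barrier.wait()                      # start all threads at once
            for _ in range(1000):
                linecache.getline(path, 1)      # reader: may repopulate the cache
                linecache.checkcache(path)      # writer: may drop the entry

        threads = [threading.Thread(target=worker) for _ in range(8)]
        for t in threads:
            t.start()
        for t in threads:
            t.join()
        print("no exceptions:", linecache.getline(path, 1).strip())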

Misc/NEWS.d entry (new file)

@@ -0,0 +1 @@
+Fix thread-safety issues in :mod:`linecache`.