Mirror of https://github.com/python/cpython.git (synced 2025-08-04 17:08:35 +00:00)
[3.11] gh-109295: Clean up multiprocessing in test_asyncio and test_compileall (GH-109298) (#109302)
gh-109295: Clean up multiprocessing in test_asyncio and test_compileall (GH-109298)
test_asyncio and test_compileall now clean up multiprocessing by calling multiprocessing.util._cleanup_tests(), which explicitly cleans up resources and stops background processes such as the resource tracker.
(cherry picked from commit 09ea4b8706)
Co-authored-by: Victor Stinner <vstinner@python.org>
Parent: 82a18069a1
Commit: abf3a68fe3
2 changed files with 10 additions and 3 deletions
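The pattern the commit relies on is small enough to sketch in isolation: register multiprocessing.util._cleanup_tests() with unittest's addCleanup() so that the multiprocessing resource tracker and any leftover worker processes are stopped after a test that spawned them. The sketch below is illustrative only and is not part of the commit; the test class and method names are made up, while the _cleanup_tests import mirrors the one added in the diff.

import concurrent.futures
import unittest

# Same helper the commit imports in both test files.
from multiprocessing.util import _cleanup_tests as multiprocessing_cleanup_tests


class ProcessPoolCleanupExample(unittest.TestCase):  # hypothetical test class
    def test_pow_in_subprocess(self):
        # Registered cleanups run after the test, even if it fails, so the
        # resource tracker and any remaining worker processes are shut down.
        self.addCleanup(multiprocessing_cleanup_tests)

        with concurrent.futures.ProcessPoolExecutor(max_workers=1) as pool:
            self.assertEqual(pool.submit(pow, 3, 2).result(), 9)


if __name__ == "__main__":
    unittest.main()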
Lib/test/test_asyncio/test_events.py

@@ -31,6 +31,7 @@ from asyncio import coroutines
 from asyncio import events
 from asyncio import proactor_events
 from asyncio import selector_events
+from multiprocessing.util import _cleanup_tests as multiprocessing_cleanup_tests
 from test.test_asyncio import utils as test_utils
 from test import support
 from test.support import socket_helper

@@ -2730,6 +2731,8 @@ class GetEventLoopTestsMixin:
         # multiprocessing.synchronize module cannot be imported.
         support.skip_if_broken_multiprocessing_synchronize()
 
+        self.addCleanup(multiprocessing_cleanup_tests)
+
         async def main():
             pool = concurrent.futures.ProcessPoolExecutor()
             result = await self.loop.run_in_executor(
Lib/test/test_compileall.py

@@ -18,6 +18,7 @@ from unittest import mock, skipUnless
 try:
     # compileall relies on ProcessPoolExecutor if ProcessPoolExecutor exists
     # and it can function.
+    from multiprocessing.util import _cleanup_tests as multiprocessing_cleanup_tests
     from concurrent.futures import ProcessPoolExecutor
     from concurrent.futures.process import _check_system_limits
     _check_system_limits()

@@ -54,6 +55,8 @@ class CompileallTestsBase:
 
     def setUp(self):
         self.directory = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, self.directory)
+
         self.source_path = os.path.join(self.directory, '_test.py')
         self.bc_path = importlib.util.cache_from_source(self.source_path)
         with open(self.source_path, 'w', encoding="utf-8") as file:

@@ -66,9 +69,6 @@ class CompileallTestsBase:
         self.source_path3 = os.path.join(self.subdirectory, '_test3.py')
         shutil.copyfile(self.source_path, self.source_path3)
 
-    def tearDown(self):
-        shutil.rmtree(self.directory)
-
     def add_bad_source_file(self):
         self.bad_source_path = os.path.join(self.directory, '_test_bad.py')
         with open(self.bad_source_path, 'w', encoding="utf-8") as file:

@@ -307,9 +307,13 @@ class CompileallTestsBase:
             script_helper.make_script(path, "__init__", "")
             mods.append(script_helper.make_script(path, "mod",
                                                   "def fn(): 1/0\nfn()\n"))
+
+        if parallel:
+            self.addCleanup(multiprocessing_cleanup_tests)
         compileall.compile_dir(
             self.directory, quiet=True, ddir=ddir,
             workers=2 if parallel else 1)
+
         self.assertTrue(mods)
         for mod in mods:
             self.assertTrue(mod.startswith(self.directory), mod)
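Beyond the multiprocessing cleanup itself, the test_compileall hunks also replace a tearDown() method with an addCleanup() call for removing the temporary directory. A brief illustrative sketch of the two patterns (class and test names here are made up, not from the commit) shows why the swap is attractive: a cleanup registered with addCleanup() still runs if a later part of setUp() raises or if the test fails, whereas tearDown() only runs when setUp() completed successfully.

import shutil
import tempfile
import unittest


class TearDownStyle(unittest.TestCase):
    # Old pattern removed by the diff: cleanup lives in tearDown(), which
    # unittest skips entirely if setUp() raised.
    def setUp(self):
        self.directory = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.directory)

    def test_directory_exists(self):
        self.assertTrue(self.directory)


class AddCleanupStyle(unittest.TestCase):
    # New pattern added by the diff: the cleanup is registered as soon as the
    # resource exists and runs (in LIFO order with other cleanups) even if the
    # rest of setUp() or the test itself fails.
    def setUp(self):
        self.directory = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.directory)

    def test_directory_exists(self):
        self.assertTrue(self.directory)


if __name__ == "__main__":
    unittest.main()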