gh-109162: libregrtest: add worker.py (#109229)
Add a new worker.py file:

* Move create_worker_process() and worker_process() into this file.
* Add a main() function to worker.py; create_worker_process() now runs the command "python -m test.libregrtest.worker JSON".
* create_worker_process() now starts the worker process in the current working directory. Since the worker no longer runs in regrtest's original directory, Regrtest now resolves the absolute path of the reflog.txt filename (the -R command line option).
* Remove the --worker-json command line option and test_regrtest.test_worker_json().

Related changes:

* Add write_json() and from_json() methods to TestResult.
* Rename select_temp_dir() to get_temp_dir() and move it to utils.
* Rename make_temp_dir() to get_work_dir() and move it to utils. It no longer calls os.makedirs(); Regrtest.main() now calls it.
* Move fix_umask() to utils; the function is now called by setup_tests().
* Move StrPath to utils.
* Add an exit_timeout() context manager to utils.
* RunTests: replace junit_filename (StrPath) with use_junit (bool).
This commit is contained in:
parent e55aab9578
commit a939b65aa6

10 changed files with 238 additions and 213 deletions
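
The core of the change is the parent/worker handshake: the parent serializes a RunTests object to JSON, passes it as the single argument of "python -m test.libregrtest.worker JSON", and reads a JSON-encoded TestResult back from the worker's stdout. A minimal self-contained sketch of that round trip (the Result dataclass, WORKER_CODE and run_in_worker() are illustrative stand-ins for TestResult and the real API, not code from this commit):

    import dataclasses
    import json
    import subprocess
    import sys

    @dataclasses.dataclass
    class Result:              # stand-in for TestResult
        test_name: str
        state: str

    # Worker side: decode argv[1], "run" the test, write the result to stdout.
    WORKER_CODE = """
    import json, sys
    job = json.loads(sys.argv[1])
    json.dump({"test_name": job["test"], "state": "PASSED"}, sys.stdout)
    """

    # Parent side: spawn the worker with the job encoded in argv, parse stdout.
    def run_in_worker(job: dict) -> Result:
        proc = subprocess.run(
            [sys.executable, "-u", "-c", WORKER_CODE, json.dumps(job)],
            capture_output=True, text=True, check=True)
        return Result(**json.loads(proc.stdout))

    print(run_in_worker({"test": "test_os"}))   # Result(test_name='test_os', state='PASSED')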

Lib/test/libregrtest/cmdline.py
@@ -216,7 +216,6 @@ def _create_parser():
     group.add_argument('--wait', action='store_true',
                        help='wait for user input, e.g., allow a debugger '
                             'to be attached')
-    group.add_argument('--worker-json', metavar='ARGS')
     group.add_argument('-S', '--start', metavar='START',
                        help='the name of the test at which to start.' +
                             more_details)

Lib/test/libregrtest/main.py
@@ -1,34 +1,27 @@
-import faulthandler
 import locale
 import os
 import platform
 import random
 import re
 import sys
-import sysconfig
-import tempfile
 import time
 import unittest
+
+from test import support
+from test.support import os_helper
+
 from test.libregrtest.cmdline import _parse_args, Namespace
 from test.libregrtest.logger import Logger
 from test.libregrtest.runtest import (
     findtests, split_test_packages, run_single_test, abs_module_name,
     PROGRESS_MIN_TIME, State, RunTests, HuntRefleak,
-    FilterTuple, TestList, StrPath, StrJSON, TestName)
+    FilterTuple, TestList, StrJSON, TestName)
 from test.libregrtest.setup import setup_tests, setup_test_dir
 from test.libregrtest.pgo import setup_pgo_tests
 from test.libregrtest.results import TestResults
-from test.libregrtest.utils import (strip_py_suffix, count, format_duration,
-                                    printlist, get_build_info)
-from test import support
-from test.support import os_helper
-from test.support import threading_helper
-
-
-# bpo-38203: Maximum delay in seconds to exit Python (call Py_Finalize()).
-# Used to protect against threading._shutdown() hang.
-# Must be smaller than buildbot "1200 seconds without output" limit.
-EXIT_TIMEOUT = 120.0
+from test.libregrtest.utils import (
+    strip_py_suffix, count, format_duration, StrPath,
+    printlist, get_build_info, get_temp_dir, get_work_dir, exit_timeout)


 class Regrtest:

@@ -104,7 +97,9 @@ class Regrtest:
         self.verbose: bool = ns.verbose
         self.quiet: bool = ns.quiet
         if ns.huntrleaks:
-            self.hunt_refleak: HuntRefleak = HuntRefleak(*ns.huntrleaks)
+            warmups, runs, filename = ns.huntrleaks
+            filename = os.path.abspath(filename)
+            self.hunt_refleak: HuntRefleak = HuntRefleak(warmups, runs, filename)
         else:
             self.hunt_refleak = None
         self.test_dir: StrPath | None = ns.testdir

@@ -454,64 +449,6 @@ class Regrtest:
         state = self.get_state()
         print(f"Result: {state}")

-    @staticmethod
-    def fix_umask():
-        if support.is_emscripten:
-            # Emscripten has default umask 0o777, which breaks some tests.
-            # see https://github.com/emscripten-core/emscripten/issues/17269
-            old_mask = os.umask(0)
-            if old_mask == 0o777:
-                os.umask(0o027)
-            else:
-                os.umask(old_mask)
-
-    @staticmethod
-    def select_temp_dir(tmp_dir):
-        if tmp_dir:
-            tmp_dir = os.path.expanduser(tmp_dir)
-        else:
-            # When tests are run from the Python build directory, it is best practice
-            # to keep the test files in a subfolder. This eases the cleanup of leftover
-            # files using the "make distclean" command.
-            if sysconfig.is_python_build():
-                tmp_dir = sysconfig.get_config_var('abs_builddir')
-                if tmp_dir is None:
-                    # bpo-30284: On Windows, only srcdir is available. Using
-                    # abs_builddir mostly matters on UNIX when building Python
-                    # out of the source tree, especially when the source tree
-                    # is read only.
-                    tmp_dir = sysconfig.get_config_var('srcdir')
-                    tmp_dir = os.path.join(tmp_dir, 'build')
-            else:
-                tmp_dir = tempfile.gettempdir()
-
-        return os.path.abspath(tmp_dir)
-
-    def is_worker(self):
-        return (self.worker_json is not None)
-
-    @staticmethod
-    def make_temp_dir(tmp_dir: StrPath, is_worker: bool):
-        os.makedirs(tmp_dir, exist_ok=True)
-
-        # Define a writable temp dir that will be used as cwd while running
-        # the tests. The name of the dir includes the pid to allow parallel
-        # testing (see the -j option).
-        # Emscripten and WASI have stubbed getpid(), Emscripten has only
-        # milisecond clock resolution. Use randint() instead.
-        if sys.platform in {"emscripten", "wasi"}:
-            nounce = random.randint(0, 1_000_000)
-        else:
-            nounce = os.getpid()
-
-        if is_worker:
-            work_dir = 'test_python_worker_{}'.format(nounce)
-        else:
-            work_dir = 'test_python_{}'.format(nounce)
-        work_dir += os_helper.FS_NONASCII
-        work_dir = os.path.join(tmp_dir, work_dir)
-        return work_dir
-
     @staticmethod
     def cleanup_temp_dir(tmp_dir: StrPath):
         import glob

@@ -534,17 +471,16 @@ class Regrtest:
         strip_py_suffix(self.cmdline_args)

-        self.tmp_dir = self.select_temp_dir(self.tmp_dir)
-
-        self.fix_umask()
+        self.tmp_dir = get_temp_dir(self.tmp_dir)

         if self.want_cleanup:
             self.cleanup_temp_dir(self.tmp_dir)
             sys.exit(0)

-        work_dir = self.make_temp_dir(self.tmp_dir, self.is_worker())
+        os.makedirs(self.tmp_dir, exist_ok=True)
+        work_dir = get_work_dir(parent_dir=self.tmp_dir)

-        try:
+        with exit_timeout():
             # Run the tests in a context manager that temporarily changes the
             # CWD to a temporary and writable directory. If it's not possible
             # to create or change the CWD, the original CWD will be used.

@@ -556,13 +492,6 @@ class Regrtest:
             # processes.

             self._main()
-        except SystemExit as exc:
-            # bpo-38203: Python can hang at exit in Py_Finalize(), especially
-            # on threading._shutdown() call: put a timeout
-            if threading_helper.can_start_thread:
-                faulthandler.dump_traceback_later(EXIT_TIMEOUT, exit=True)
-
-            sys.exit(exc.code)

     def create_run_tests(self):
         return RunTests(

@@ -579,7 +508,7 @@ class Regrtest:
             quiet=self.quiet,
             hunt_refleak=self.hunt_refleak,
             test_dir=self.test_dir,
-            junit_filename=self.junit_filename,
+            use_junit=(self.junit_filename is not None),
             memory_limit=self.memory_limit,
             gc_threshold=self.gc_threshold,
             use_resources=self.use_resources,

@@ -634,11 +563,6 @@ class Regrtest:
                                   self.fail_rerun)

     def _main(self):
-        if self.is_worker():
-            from test.libregrtest.runtest_mp import worker_process
-            worker_process(self.worker_json)
-            return
-
         if self.want_wait:
             input("Press any key to continue...")

Lib/test/libregrtest/refleak.py
@@ -68,7 +68,6 @@ def runtest_refleak(test_name, test_func,
     warmups = hunt_refleak.warmups
     runs = hunt_refleak.runs
     filename = hunt_refleak.filename
-    filename = os.path.join(os_helper.SAVEDCWD, filename)
     repcount = warmups + runs

     # Pre-allocate to ensure that the loop doesn't allocate anything new

Lib/test/libregrtest/results.py
@@ -2,9 +2,10 @@ import sys
 from test.support import TestStats

 from test.libregrtest.runtest import (
-    TestName, TestTuple, TestList, FilterDict, StrPath, State,
+    TestName, TestTuple, TestList, FilterDict, State,
     TestResult, RunTests)
-from test.libregrtest.utils import printlist, count, format_duration
+from test.libregrtest.utils import (
+    printlist, count, format_duration, StrPath)


 EXITCODE_BAD_TEST = 2

Lib/test/libregrtest/runtest.py
@@ -17,11 +17,11 @@ from test.support import TestStats
 from test.support import os_helper
 from test.support import threading_helper
 from test.libregrtest.save_env import saved_test_environment
-from test.libregrtest.utils import clear_caches, format_duration, print_warning
+from test.libregrtest.utils import (
+    clear_caches, format_duration, print_warning, StrPath)


 StrJSON = str
-StrPath = str
 TestName = str
 TestTuple = tuple[TestName, ...]
 TestList = list[TestName]

@@ -215,6 +215,33 @@ class TestResult:
             return None
         return tuple(match_tests)

+    def write_json(self, file) -> None:
+        json.dump(self, file, cls=_EncodeTestResult)
+
+    @staticmethod
+    def from_json(worker_json) -> 'TestResult':
+        return json.loads(worker_json, object_hook=_decode_test_result)
+
+
+class _EncodeTestResult(json.JSONEncoder):
+    def default(self, o: Any) -> dict[str, Any]:
+        if isinstance(o, TestResult):
+            result = dataclasses.asdict(o)
+            result["__test_result__"] = o.__class__.__name__
+            return result
+        else:
+            return super().default(o)
+
+
+def _decode_test_result(data: dict[str, Any]) -> TestResult | dict[str, Any]:
+    if "__test_result__" in data:
+        data.pop('__test_result__')
+        if data['stats'] is not None:
+            data['stats'] = TestStats(**data['stats'])
+        return TestResult(**data)
+    else:
+        return data
+
+
 @dataclasses.dataclass(slots=True, frozen=True)
 class RunTests:

@@ -234,7 +261,7 @@ class RunTests:
     quiet: bool = False
     hunt_refleak: HuntRefleak | None = None
     test_dir: StrPath | None = None
-    junit_filename: StrPath | None = None
+    use_junit: bool = False
     memory_limit: str | None = None
     gc_threshold: int | None = None
     use_resources: list[str] = None

@@ -358,7 +385,7 @@ def setup_support(runtests: RunTests):
     support.set_match_tests(runtests.match_tests, runtests.ignore_tests)
     support.failfast = runtests.fail_fast
     support.verbose = runtests.verbose
-    if runtests.junit_filename:
+    if runtests.use_junit:
         support.junit_xml_list = []
     else:
         support.junit_xml_list = None

@@ -434,8 +461,8 @@ def run_single_test(test_name: TestName, runtests: RunTests) -> TestResult:

     Returns a TestResult.

-    If runtests.junit_filename is not None, xml_data is a list containing each
-    generated testsuite element.
+    If runtests.use_junit, xml_data is a list containing each generated
+    testsuite element.
     """
     start_time = time.perf_counter()
     result = TestResult(test_name)
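
The write_json()/from_json() pair added above relies on a marker key, "__test_result__", so a json object_hook can tell TestResult dicts apart from ordinary JSON objects. The same pattern in a self-contained sketch (the Point dataclass is illustrative, not part of the diff):

    import dataclasses
    import json
    from typing import Any

    @dataclasses.dataclass
    class Point:
        x: int
        y: int

    class _EncodePoint(json.JSONEncoder):
        def default(self, o: Any) -> dict[str, Any]:
            if isinstance(o, Point):
                result = dataclasses.asdict(o)
                result["__point__"] = o.__class__.__name__   # type marker
                return result
            return super().default(o)

    def _decode_point(data: dict[str, Any]) -> Point | dict[str, Any]:
        if "__point__" in data:
            data.pop("__point__")
            return Point(**data)
        return data   # not a marked dict: pass it through unchanged

    text = json.dumps(Point(1, 2), cls=_EncodePoint)
    assert json.loads(text, object_hook=_decode_point) == Point(1, 2)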

Lib/test/libregrtest/runtest_mp.py
@@ -1,6 +1,5 @@
 import dataclasses
 import faulthandler
-import json
 import os.path
 import queue
 import signal

@@ -10,19 +9,19 @@ import tempfile
 import threading
 import time
 import traceback
-from typing import NoReturn, Literal, Any, TextIO
+from typing import Literal, TextIO

 from test import support
 from test.support import os_helper
-from test.support import TestStats

 from test.libregrtest.main import Regrtest
 from test.libregrtest.runtest import (
-    run_single_test, TestResult, State, PROGRESS_MIN_TIME,
-    FilterTuple, RunTests, StrPath, StrJSON, TestName)
-from test.libregrtest.setup import setup_tests, setup_test_dir
+    TestResult, State, PROGRESS_MIN_TIME,
+    RunTests, TestName)
 from test.libregrtest.results import TestResults
-from test.libregrtest.utils import format_duration, print_warning
+from test.libregrtest.utils import (
+    format_duration, print_warning, StrPath)
+from test.libregrtest.worker import create_worker_process, USE_PROCESS_GROUP

 if sys.platform == 'win32':
     import locale

@@ -41,75 +40,6 @@ assert MAIN_PROCESS_TIMEOUT >= PROGRESS_UPDATE
 # Time to wait until a worker completes: should be immediate
 JOIN_TIMEOUT = 30.0   # seconds

-USE_PROCESS_GROUP = (hasattr(os, "setsid") and hasattr(os, "killpg"))
-
-
-@dataclasses.dataclass(slots=True)
-class WorkerJob:
-    runtests: RunTests
-
-
-def create_worker_process(runtests: RunTests,
-                          output_file: TextIO,
-                          tmp_dir: StrPath | None = None) -> subprocess.Popen:
-    python_cmd = runtests.python_cmd
-    worker_json = runtests.as_json()
-
-    if python_cmd is not None:
-        executable = python_cmd
-    else:
-        executable = [sys.executable]
-    cmd = [*executable, *support.args_from_interpreter_flags(),
-           '-u',    # Unbuffered stdout and stderr
-           '-m', 'test.regrtest',
-           '--worker-json', worker_json]
-
-    env = dict(os.environ)
-    if tmp_dir is not None:
-        env['TMPDIR'] = tmp_dir
-        env['TEMP'] = tmp_dir
-        env['TMP'] = tmp_dir
-
-    # Running the child from the same working directory as regrtest's original
-    # invocation ensures that TEMPDIR for the child is the same when
-    # sysconfig.is_python_build() is true. See issue 15300.
-    kw = dict(
-        env=env,
-        stdout=output_file,
-        # bpo-45410: Write stderr into stdout to keep messages order
-        stderr=output_file,
-        text=True,
-        close_fds=(os.name != 'nt'),
-        cwd=os_helper.SAVEDCWD,
-    )
-    if USE_PROCESS_GROUP:
-        kw['start_new_session'] = True
-    return subprocess.Popen(cmd, **kw)
-
-
-def worker_process(worker_json: StrJSON) -> NoReturn:
-    runtests = RunTests.from_json(worker_json)
-    test_name = runtests.tests[0]
-    match_tests: FilterTuple | None = runtests.match_tests
-
-    setup_test_dir(runtests.test_dir)
-    setup_tests(runtests)
-
-    if runtests.rerun:
-        if match_tests:
-            matching = "matching: " + ", ".join(match_tests)
-            print(f"Re-running {test_name} in verbose mode ({matching})", flush=True)
-        else:
-            print(f"Re-running {test_name} in verbose mode", flush=True)
-
-    result = run_single_test(test_name, runtests)
-    print()   # Force a newline (just in case)
-
-    # Serialize TestResult as dict in JSON
-    json.dump(result, sys.stdout, cls=EncodeTestResult)
-    sys.stdout.flush()
-    sys.exit(0)
-
-
 # We do not use a generator so multiple threads can call next().
 class MultiprocessIterator:

@@ -340,9 +270,7 @@ class WorkerThread(threading.Thread):
             err_msg = "Failed to parse worker stdout"
         else:
             try:
-                # deserialize run_tests_worker() output
-                result = json.loads(worker_json,
-                                    object_hook=decode_test_result)
+                result = TestResult.from_json(worker_json)
             except Exception as exc:
                 err_msg = "Failed to parse worker JSON: %s" % exc

@@ -562,27 +490,3 @@ class RunWorkers:
             # worker when we exit this function
             self.pending.stop()
             self.stop_workers()
-
-
-class EncodeTestResult(json.JSONEncoder):
-    """Encode a TestResult (sub)class object into a JSON dict."""
-
-    def default(self, o: Any) -> dict[str, Any]:
-        if isinstance(o, TestResult):
-            result = dataclasses.asdict(o)
-            result["__test_result__"] = o.__class__.__name__
-            return result
-
-        return super().default(o)
-
-
-def decode_test_result(d: dict[str, Any]) -> TestResult | dict[str, Any]:
-    """Decode a TestResult (sub)class object from a JSON dict."""
-
-    if "__test_result__" not in d:
-        return d
-
-    d.pop('__test_result__')
-    if d['stats'] is not None:
-        d['stats'] = TestStats(**d['stats'])
-    return TestResult(**d)

Lib/test/libregrtest/setup.py
@@ -11,8 +11,8 @@ try:
 except ImportError:
     gc = None

-from test.libregrtest.utils import (setup_unraisable_hook,
-                                    setup_threading_excepthook)
+from test.libregrtest.utils import (
+    setup_unraisable_hook, setup_threading_excepthook, fix_umask)


 UNICODE_GUARD_ENV = "PYTHONREGRTEST_UNICODE_GUARD"

@@ -26,6 +26,8 @@ def setup_test_dir(testdir: str | None) -> None:


 def setup_tests(runtests):
+    fix_umask()
+
     try:
         stderr_fd = sys.__stderr__.fileno()
     except (ValueError, AttributeError):

@@ -102,7 +104,7 @@ def setup_tests(runtests):
     support.SHORT_TIMEOUT = min(support.SHORT_TIMEOUT, timeout)
     support.LONG_TIMEOUT = min(support.LONG_TIMEOUT, timeout)

-    if runtests.junit_filename:
+    if runtests.use_junit:
         from test.support.testresult import RegressionTestResult
         RegressionTestResult.USE_XML = True

Lib/test/libregrtest/utils.py
@@ -1,13 +1,28 @@
+import contextlib
+import faulthandler
 import math
 import os.path
+import random
 import sys
 import sysconfig
+import tempfile
 import textwrap

 from test import support
+from test.support import os_helper
+from test.support import threading_helper


 MS_WINDOWS = (sys.platform == 'win32')

+# bpo-38203: Maximum delay in seconds to exit Python (call Py_Finalize()).
+# Used to protect against threading._shutdown() hang.
+# Must be smaller than buildbot "1200 seconds without output" limit.
+EXIT_TIMEOUT = 120.0
+
+
+StrPath = str
+
+
 def format_duration(seconds):
     ms = math.ceil(seconds * 1e3)

@@ -308,3 +323,69 @@ def get_build_info():
         build.append("dtrace")

     return build
+
+
+def get_temp_dir(tmp_dir):
+    if tmp_dir:
+        tmp_dir = os.path.expanduser(tmp_dir)
+    else:
+        # When tests are run from the Python build directory, it is best practice
+        # to keep the test files in a subfolder. This eases the cleanup of leftover
+        # files using the "make distclean" command.
+        if sysconfig.is_python_build():
+            tmp_dir = sysconfig.get_config_var('abs_builddir')
+            if tmp_dir is None:
+                # bpo-30284: On Windows, only srcdir is available. Using
+                # abs_builddir mostly matters on UNIX when building Python
+                # out of the source tree, especially when the source tree
+                # is read only.
+                tmp_dir = sysconfig.get_config_var('srcdir')
+                tmp_dir = os.path.join(tmp_dir, 'build')
+        else:
+            tmp_dir = tempfile.gettempdir()
+
+    return os.path.abspath(tmp_dir)
+
+
+def fix_umask():
+    if support.is_emscripten:
+        # Emscripten has default umask 0o777, which breaks some tests.
+        # see https://github.com/emscripten-core/emscripten/issues/17269
+        old_mask = os.umask(0)
+        if old_mask == 0o777:
+            os.umask(0o027)
+        else:
+            os.umask(old_mask)
+
+
+def get_work_dir(*, parent_dir: StrPath = '', worker: bool = False):
+    # Define a writable temp dir that will be used as cwd while running
+    # the tests. The name of the dir includes the pid to allow parallel
+    # testing (see the -j option).
+    # Emscripten and WASI have stubbed getpid(), Emscripten has only
+    # milisecond clock resolution. Use randint() instead.
+    if sys.platform in {"emscripten", "wasi"}:
+        nounce = random.randint(0, 1_000_000)
+    else:
+        nounce = os.getpid()
+
+    if worker:
+        work_dir = 'test_python_worker_{}'.format(nounce)
+    else:
+        work_dir = 'test_python_{}'.format(nounce)
+    work_dir += os_helper.FS_NONASCII
+    if parent_dir:
+        work_dir = os.path.join(parent_dir, work_dir)
+    return work_dir
+
+
+@contextlib.contextmanager
+def exit_timeout():
+    try:
+        yield
+    except SystemExit as exc:
+        # bpo-38203: Python can hang at exit in Py_Finalize(), especially
+        # on threading._shutdown() call: put a timeout
+        if threading_helper.can_start_thread:
+            faulthandler.dump_traceback_later(EXIT_TIMEOUT, exit=True)
+        sys.exit(exc.code)
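
The new exit_timeout() context manager re-raises SystemExit, but first arms faulthandler.dump_traceback_later() so that a hang in Py_Finalize() (typically in threading._shutdown()) cannot stall a buildbot forever. A simplified usage sketch (the threading_helper.can_start_thread guard is omitted, and run() is a placeholder for the real work):

    import contextlib
    import faulthandler
    import sys

    EXIT_TIMEOUT = 120.0   # same constant the diff adds to utils.py

    @contextlib.contextmanager
    def exit_timeout():
        try:
            yield
        except SystemExit as exc:
            # Arm the watchdog, then re-raise: if interpreter shutdown
            # hangs, faulthandler dumps all tracebacks and force-exits.
            faulthandler.dump_traceback_later(EXIT_TIMEOUT, exit=True)
            sys.exit(exc.code)

    def run() -> None:
        print("running tests...")   # placeholder for the real work
        sys.exit(0)

    with exit_timeout():
        run()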

Lib/test/libregrtest/worker.py (new file, 93 lines)
@@ -0,0 +1,93 @@
+import subprocess
+import sys
+import os
+from typing import TextIO, NoReturn
+
+from test import support
+from test.support import os_helper
+
+from test.libregrtest.setup import setup_tests, setup_test_dir
+from test.libregrtest.runtest import (
+    run_single_test, StrJSON, FilterTuple, RunTests)
+from test.libregrtest.utils import get_work_dir, exit_timeout, StrPath
+
+
+USE_PROCESS_GROUP = (hasattr(os, "setsid") and hasattr(os, "killpg"))
+
+
+def create_worker_process(runtests: RunTests,
+                          output_file: TextIO,
+                          tmp_dir: StrPath | None = None) -> subprocess.Popen:
+    python_cmd = runtests.python_cmd
+    worker_json = runtests.as_json()
+
+    if python_cmd is not None:
+        executable = python_cmd
+    else:
+        executable = [sys.executable]
+    cmd = [*executable, *support.args_from_interpreter_flags(),
+           '-u',    # Unbuffered stdout and stderr
+           '-m', 'test.libregrtest.worker',
+           worker_json]
+
+    env = dict(os.environ)
+    if tmp_dir is not None:
+        env['TMPDIR'] = tmp_dir
+        env['TEMP'] = tmp_dir
+        env['TMP'] = tmp_dir
+
+    # Running the child from the same working directory as regrtest's original
+    # invocation ensures that TEMPDIR for the child is the same when
+    # sysconfig.is_python_build() is true. See issue 15300.
+    kw = dict(
+        env=env,
+        stdout=output_file,
+        # bpo-45410: Write stderr into stdout to keep messages order
+        stderr=output_file,
+        text=True,
+        close_fds=(os.name != 'nt'),
+    )
+    if USE_PROCESS_GROUP:
+        kw['start_new_session'] = True
+    return subprocess.Popen(cmd, **kw)
+
+
+def worker_process(worker_json: StrJSON) -> NoReturn:
+    runtests = RunTests.from_json(worker_json)
+    test_name = runtests.tests[0]
+    match_tests: FilterTuple | None = runtests.match_tests
+
+    setup_test_dir(runtests.test_dir)
+    setup_tests(runtests)
+
+    if runtests.rerun:
+        if match_tests:
+            matching = "matching: " + ", ".join(match_tests)
+            print(f"Re-running {test_name} in verbose mode ({matching})", flush=True)
+        else:
+            print(f"Re-running {test_name} in verbose mode", flush=True)
+
+    result = run_single_test(test_name, runtests)
+    print()   # Force a newline (just in case)
+
+    # Serialize TestResult as dict in JSON
+    result.write_json(sys.stdout)
+    sys.stdout.flush()
+    sys.exit(0)
+
+
+def main():
+    if len(sys.argv) != 2:
+        print("usage: python -m test.libregrtest.worker JSON")
+        sys.exit(1)
+    worker_json = sys.argv[1]
+
+    work_dir = get_work_dir(worker=True)
+
+    with exit_timeout():
+        with os_helper.temp_cwd(work_dir, quiet=True):
+            worker_process(worker_json)
+
+
+if __name__ == "__main__":
+    main()
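
With the module entry point above in place, a worker can also be launched by hand for debugging. The single positional argument is the JSON produced by RunTests.as_json(); the payload shown here is schematic, not a real dump:

    python -m test.libregrtest.worker '{"tests": ["test_os"], ...}'

On success the worker prints a JSON-encoded TestResult to stdout and exits with code 0.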

Lib/test/test_regrtest.py
@@ -75,11 +75,6 @@ class ParseArgsTestCase(unittest.TestCase):
         ns = libregrtest._parse_args(['--wait'])
         self.assertTrue(ns.wait)

-    def test_worker_json(self):
-        ns = libregrtest._parse_args(['--worker-json', '[[], {}]'])
-        self.assertEqual(ns.worker_json, '[[], {}]')
-        self.checkError(['--worker-json'], 'expected one argument')
-
     def test_start(self):
         for opt in '-S', '--start':
             with self.subTest(opt=opt):