[3.12] gh-110666: Fix multiprocessing test_terminate() elapsed (GH-110667) (#110668)

gh-110666: Fix multiprocessing test_terminate() elapsed (GH-110667)

multiprocessing test_terminate() and test_wait_socket_slow() no
longer test the CI performance: no longer check maximum elapsed time.

Add CLOCK_RES constant: tolerate a difference of 100 ms.
(cherry picked from commit 1556f426da)

Co-authored-by: Victor Stinner <vstinner@python.org>
This commit is contained in:
Miss Islington (bot) 2023-10-11 04:13:01 +02:00 committed by GitHub
parent 55448a5b14
commit 18458a527f
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23

View file

@@ -84,6 +84,11 @@ if support.HAVE_ASAN_FORK_BUG:
     raise unittest.SkipTest("libasan has a pthread_create() dead lock related to thread+fork")

+# gh-110666: Tolerate a difference of 100 ms when comparing timings
+# (clock resolution)
+CLOCK_RES = 0.100
+
 def latin(s):
     return s.encode('latin')
@@ -1655,8 +1660,7 @@ class _TestCondition(BaseTestCase):
         dt = time.monotonic()
         result = cond.wait_for(lambda : state.value==4, timeout=expected)
         dt = time.monotonic() - dt
-        # borrow logic in assertTimeout() from test/lock_tests.py
-        if not result and expected * 0.6 <= dt:
+        if not result and (expected - CLOCK_RES) <= dt:
             success.value = True

 @unittest.skipUnless(HAS_SHAREDCTYPES, 'needs sharedctypes')
@@ -2678,14 +2682,11 @@ class _TestPool(BaseTestCase):
         p.join()

     def test_terminate(self):
-        result = self.pool.map_async(
-            time.sleep, [0.1 for i in range(10000)], chunksize=1
-        )
+        # Simulate slow tasks which take "forever" to complete
+        args = [support.LONG_TIMEOUT for i in range(10_000)]
+        result = self.pool.map_async(time.sleep, args, chunksize=1)
         self.pool.terminate()
-        join = TimingWrapper(self.pool.join)
-        join()
-        # Sanity check the pool didn't wait for all tasks to finish
-        self.assertLess(join.elapsed, 2.0)
+        self.pool.join()

     def test_empty_iterable(self):
         # See Issue 12157
@@ -4870,7 +4871,7 @@ class TestWait(unittest.TestCase):
     def _child_test_wait(cls, w, slow):
         for i in range(10):
             if slow:
-                time.sleep(random.random()*0.1)
+                time.sleep(random.random() * 0.100)
             w.send((i, os.getpid()))
         w.close()
@@ -4910,7 +4911,7 @@ class TestWait(unittest.TestCase):
         s.connect(address)
         for i in range(10):
             if slow:
-                time.sleep(random.random()*0.1)
+                time.sleep(random.random() * 0.100)
             s.sendall(('%s\n' % i).encode('ascii'))
         s.close()
@@ -4959,25 +4960,19 @@ class TestWait(unittest.TestCase):
     def test_wait_timeout(self):
         from multiprocessing.connection import wait

-        expected = 5
+        timeout = 5.0  # seconds
         a, b = multiprocessing.Pipe()

         start = time.monotonic()
-        res = wait([a, b], expected)
+        res = wait([a, b], timeout)
         delta = time.monotonic() - start

         self.assertEqual(res, [])
-        self.assertLess(delta, expected * 2)
-        self.assertGreater(delta, expected * 0.5)
+        self.assertGreater(delta, timeout - CLOCK_RES)

         b.send(None)
-        start = time.monotonic()
         res = wait([a, b], 20)
-        delta = time.monotonic() - start

         self.assertEqual(res, [a])
-        self.assertLess(delta, 0.4)
     @classmethod
     def signal_and_sleep(cls, sem, period):