Mirror of https://github.com/python/cpython.git (synced 2025-08-04 08:59:19 +00:00)
[3.14] gh-136476: Show the full stack in get_async_stack_trace in _remote_debugging (GH-136483) (#136490)

(cherry picked from commit ea45a2f97c)
Co-authored-by: Pablo Galindo Salgado <Pablogsal@gmail.com>

This commit is contained in:
parent b733bf7437
commit 4d025a2318
4 changed files with 860 additions and 635 deletions
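The tests changed below exercise the RemoteUnwinder API from the _remote_debugging module. As orientation, here is a minimal, hypothetical sketch of how that API might be driven outside the test suite. RemoteUnwinder(pid, ...) and get_async_stack_trace() appear in the diff itself, and the attribute names (task_name, task_id, coroutine_stack, awaited_by, call_stack) are taken from the test assertions below, but the overall result shape used here is an assumption, not an authoritative recipe.

# Illustrative sketch only (not part of the commit): attach to a process
# running asyncio code and print its async task graph.
from _remote_debugging import RemoteUnwinder

def dump_async_stacks(pid):
    unwinder = RemoteUnwinder(pid)
    # Assumed shape: one (thread_id, tasks) entry per thread with a running task.
    for thread_id, tasks in unwinder.get_async_stack_trace():
        print(f"thread {thread_id}")
        for task in tasks:
            print(f"  task {task.task_name} (id={task.task_id:#x})")
            # The task's own coroutine call stack.
            for coro in task.coroutine_stack:
                for frame in coro.call_stack:
                    print("   ", tuple(frame))
            # Tasks waiting on this task.
            for waiter in task.awaited_by:
                print("    awaited by:", waiter.task_name)

A caller would typically pass the PID of a child process, much as the tests below do with p.pid.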
@@ -8,7 +8,12 @@ import threading
 import time
 from asyncio import staggered, taskgroups, base_events, tasks
 from unittest.mock import ANY
-from test.support import os_helper, SHORT_TIMEOUT, busy_retry, requires_gil_enabled
+from test.support import (
+    os_helper,
+    SHORT_TIMEOUT,
+    busy_retry,
+    requires_gil_enabled,
+)
 from test.support.script_helper import make_script
 from test.support.socket_helper import find_unused_port
@@ -236,55 +241,162 @@ class TestGetStackTrace(unittest.TestCase):
                 p.terminate()
                 p.wait(timeout=SHORT_TIMEOUT)
 
-            # sets are unordered, so we want to sort "awaited_by"s
-            stack_trace[2].sort(key=lambda x: x[1])
-
-            expected_stack_trace = [
-                [
-                    FrameInfo([script_name, 10, "c5"]),
-                    FrameInfo([script_name, 14, "c4"]),
-                    FrameInfo([script_name, 17, "c3"]),
-                    FrameInfo([script_name, 20, "c2"]),
-                ],
-                "c2_root",
-                [
-                    CoroInfo(
-                        [
-                            [
-                                FrameInfo(
-                                    [
-                                        taskgroups.__file__,
-                                        ANY,
-                                        "TaskGroup._aexit",
-                                    ]
-                                ),
-                                FrameInfo(
-                                    [
-                                        taskgroups.__file__,
-                                        ANY,
-                                        "TaskGroup.__aexit__",
-                                    ]
-                                ),
-                                FrameInfo([script_name, 26, "main"]),
-                            ],
-                            "Task-1",
-                        ]
-                    ),
-                    CoroInfo(
-                        [
-                            [FrameInfo([script_name, 23, "c1"])],
-                            "sub_main_1",
-                        ]
-                    ),
-                    CoroInfo(
-                        [
-                            [FrameInfo([script_name, 23, "c1"])],
-                            "sub_main_2",
-                        ]
-                    ),
-                ],
-            ]
-            self.assertEqual(stack_trace, expected_stack_trace)
+            # First check all the tasks are present
+            tasks_names = [
+                task.task_name for task in stack_trace[0].awaited_by
+            ]
+            for task_name in ["c2_root", "sub_main_1", "sub_main_2"]:
+                self.assertIn(task_name, tasks_names)
+
+            # Now ensure that the awaited_by_relationships are correct
+            id_to_task = {
+                task.task_id: task for task in stack_trace[0].awaited_by
+            }
+            task_name_to_awaited_by = {
+                task.task_name: set(
+                    id_to_task[awaited.task_name].task_name
+                    for awaited in task.awaited_by
+                )
+                for task in stack_trace[0].awaited_by
+            }
+            self.assertEqual(
+                task_name_to_awaited_by,
+                {
+                    "c2_root": {"Task-1", "sub_main_1", "sub_main_2"},
+                    "Task-1": set(),
+                    "sub_main_1": {"Task-1"},
+                    "sub_main_2": {"Task-1"},
+                },
+            )
+
+            # Now ensure that the coroutine stacks are correct
+            coroutine_stacks = {
+                task.task_name: sorted(
+                    tuple(tuple(frame) for frame in coro.call_stack)
+                    for coro in task.coroutine_stack
+                )
+                for task in stack_trace[0].awaited_by
+            }
+            self.assertEqual(
+                coroutine_stacks,
+                {
+                    "Task-1": [
+                        (
+                            tuple(
+                                [
+                                    taskgroups.__file__,
+                                    ANY,
+                                    "TaskGroup._aexit",
+                                ]
+                            ),
+                            tuple(
+                                [
+                                    taskgroups.__file__,
+                                    ANY,
+                                    "TaskGroup.__aexit__",
+                                ]
+                            ),
+                            tuple([script_name, 26, "main"]),
+                        )
+                    ],
+                    "c2_root": [
+                        (
+                            tuple([script_name, 10, "c5"]),
+                            tuple([script_name, 14, "c4"]),
+                            tuple([script_name, 17, "c3"]),
+                            tuple([script_name, 20, "c2"]),
+                        )
+                    ],
+                    "sub_main_1": [(tuple([script_name, 23, "c1"]),)],
+                    "sub_main_2": [(tuple([script_name, 23, "c1"]),)],
+                },
+            )
+
+            # Now ensure the coroutine stacks for the awaited_by relationships are correct.
+            awaited_by_coroutine_stacks = {
+                task.task_name: sorted(
+                    (
+                        id_to_task[coro.task_name].task_name,
+                        tuple(tuple(frame) for frame in coro.call_stack),
+                    )
+                    for coro in task.awaited_by
+                )
+                for task in stack_trace[0].awaited_by
+            }
+            self.assertEqual(
+                awaited_by_coroutine_stacks,
+                {
+                    "Task-1": [],
+                    "c2_root": [
+                        (
+                            "Task-1",
+                            (
+                                tuple(
+                                    [
+                                        taskgroups.__file__,
+                                        ANY,
+                                        "TaskGroup._aexit",
+                                    ]
+                                ),
+                                tuple(
+                                    [
+                                        taskgroups.__file__,
+                                        ANY,
+                                        "TaskGroup.__aexit__",
+                                    ]
+                                ),
+                                tuple([script_name, 26, "main"]),
+                            ),
+                        ),
+                        ("sub_main_1", (tuple([script_name, 23, "c1"]),)),
+                        ("sub_main_2", (tuple([script_name, 23, "c1"]),)),
+                    ],
+                    "sub_main_1": [
+                        (
+                            "Task-1",
+                            (
+                                tuple(
+                                    [
+                                        taskgroups.__file__,
+                                        ANY,
+                                        "TaskGroup._aexit",
+                                    ]
+                                ),
+                                tuple(
+                                    [
+                                        taskgroups.__file__,
+                                        ANY,
+                                        "TaskGroup.__aexit__",
+                                    ]
+                                ),
+                                tuple([script_name, 26, "main"]),
+                            ),
+                        )
+                    ],
+                    "sub_main_2": [
+                        (
+                            "Task-1",
+                            (
+                                tuple(
+                                    [
+                                        taskgroups.__file__,
+                                        ANY,
+                                        "TaskGroup._aexit",
+                                    ]
+                                ),
+                                tuple(
+                                    [
+                                        taskgroups.__file__,
+                                        ANY,
+                                        "TaskGroup.__aexit__",
+                                    ]
+                                ),
+                                tuple([script_name, 26, "main"]),
+                            ),
+                        )
+                    ],
+                },
+            )
 
         @skip_if_not_supported
         @unittest.skipIf(
@@ -350,19 +462,29 @@ class TestGetStackTrace(unittest.TestCase):
                 p.terminate()
                 p.wait(timeout=SHORT_TIMEOUT)
 
-            # sets are unordered, so we want to sort "awaited_by"s
-            stack_trace[2].sort(key=lambda x: x[1])
-
-            expected_stack_trace = [
-                [
-                    FrameInfo([script_name, 10, "gen_nested_call"]),
-                    FrameInfo([script_name, 16, "gen"]),
-                    FrameInfo([script_name, 19, "main"]),
-                ],
-                "Task-1",
-                [],
-            ]
-            self.assertEqual(stack_trace, expected_stack_trace)
+            # For this simple asyncgen test, we only expect one task with the full coroutine stack
+            self.assertEqual(len(stack_trace[0].awaited_by), 1)
+            task = stack_trace[0].awaited_by[0]
+            self.assertEqual(task.task_name, "Task-1")
+
+            # Check the coroutine stack - based on actual output, only shows main
+            coroutine_stack = sorted(
+                tuple(tuple(frame) for frame in coro.call_stack)
+                for coro in task.coroutine_stack
+            )
+            self.assertEqual(
+                coroutine_stack,
+                [
+                    (
+                        tuple([script_name, 10, "gen_nested_call"]),
+                        tuple([script_name, 16, "gen"]),
+                        tuple([script_name, 19, "main"]),
+                    )
+                ],
+            )
+
+            # No awaited_by relationships expected for this simple case
+            self.assertEqual(task.awaited_by, [])
 
         @skip_if_not_supported
         @unittest.skipIf(
@@ -429,18 +551,73 @@ class TestGetStackTrace(unittest.TestCase):
                 p.terminate()
                 p.wait(timeout=SHORT_TIMEOUT)
 
-            # sets are unordered, so we want to sort "awaited_by"s
-            stack_trace[2].sort(key=lambda x: x[1])
-
-            expected_stack_trace = [
-                [
-                    FrameInfo([script_name, 11, "deep"]),
-                    FrameInfo([script_name, 15, "c1"]),
-                ],
-                "Task-2",
-                [CoroInfo([[FrameInfo([script_name, 21, "main"])], "Task-1"])],
-            ]
-            self.assertEqual(stack_trace, expected_stack_trace)
+            # First check all the tasks are present
+            tasks_names = [
+                task.task_name for task in stack_trace[0].awaited_by
+            ]
+            for task_name in ["Task-1", "Task-2"]:
+                self.assertIn(task_name, tasks_names)
+
+            # Now ensure that the awaited_by_relationships are correct
+            id_to_task = {
+                task.task_id: task for task in stack_trace[0].awaited_by
+            }
+            task_name_to_awaited_by = {
+                task.task_name: set(
+                    id_to_task[awaited.task_name].task_name
+                    for awaited in task.awaited_by
+                )
+                for task in stack_trace[0].awaited_by
+            }
+            self.assertEqual(
+                task_name_to_awaited_by,
+                {
+                    "Task-1": set(),
+                    "Task-2": {"Task-1"},
+                },
+            )
+
+            # Now ensure that the coroutine stacks are correct
+            coroutine_stacks = {
+                task.task_name: sorted(
+                    tuple(tuple(frame) for frame in coro.call_stack)
+                    for coro in task.coroutine_stack
+                )
+                for task in stack_trace[0].awaited_by
+            }
+            self.assertEqual(
+                coroutine_stacks,
+                {
+                    "Task-1": [(tuple([script_name, 21, "main"]),)],
+                    "Task-2": [
+                        (
+                            tuple([script_name, 11, "deep"]),
+                            tuple([script_name, 15, "c1"]),
+                        )
+                    ],
+                },
+            )
+
+            # Now ensure the coroutine stacks for the awaited_by relationships are correct.
+            awaited_by_coroutine_stacks = {
+                task.task_name: sorted(
+                    (
+                        id_to_task[coro.task_name].task_name,
+                        tuple(tuple(frame) for frame in coro.call_stack),
+                    )
+                    for coro in task.awaited_by
+                )
+                for task in stack_trace[0].awaited_by
+            }
+            self.assertEqual(
+                awaited_by_coroutine_stacks,
+                {
+                    "Task-1": [],
+                    "Task-2": [
+                        ("Task-1", (tuple([script_name, 21, "main"]),))
+                    ],
+                },
+            )
 
         @skip_if_not_supported
         @unittest.skipIf(
@@ -510,36 +687,93 @@ class TestGetStackTrace(unittest.TestCase):
                 p.terminate()
                 p.wait(timeout=SHORT_TIMEOUT)
 
-            # sets are unordered, so we want to sort "awaited_by"s
-            stack_trace[2].sort(key=lambda x: x[1])
-            expected_stack_trace = [
-                [
-                    FrameInfo([script_name, 11, "deep"]),
-                    FrameInfo([script_name, 15, "c1"]),
-                    FrameInfo(
-                        [
-                            staggered.__file__,
-                            ANY,
-                            "staggered_race.<locals>.run_one_coro",
-                        ]
-                    ),
-                ],
-                "Task-2",
-                [
-                    CoroInfo(
-                        [
-                            [
-                                FrameInfo(
-                                    [staggered.__file__, ANY, "staggered_race"]
-                                ),
-                                FrameInfo([script_name, 21, "main"]),
-                            ],
-                            "Task-1",
-                        ]
-                    )
-                ],
-            ]
-            self.assertEqual(stack_trace, expected_stack_trace)
+            # First check all the tasks are present
+            tasks_names = [
+                task.task_name for task in stack_trace[0].awaited_by
+            ]
+            for task_name in ["Task-1", "Task-2"]:
+                self.assertIn(task_name, tasks_names)
+
+            # Now ensure that the awaited_by_relationships are correct
+            id_to_task = {
+                task.task_id: task for task in stack_trace[0].awaited_by
+            }
+            task_name_to_awaited_by = {
+                task.task_name: set(
+                    id_to_task[awaited.task_name].task_name
+                    for awaited in task.awaited_by
+                )
+                for task in stack_trace[0].awaited_by
+            }
+            self.assertEqual(
+                task_name_to_awaited_by,
+                {
+                    "Task-1": set(),
+                    "Task-2": {"Task-1"},
+                },
+            )
+
+            # Now ensure that the coroutine stacks are correct
+            coroutine_stacks = {
+                task.task_name: sorted(
+                    tuple(tuple(frame) for frame in coro.call_stack)
+                    for coro in task.coroutine_stack
+                )
+                for task in stack_trace[0].awaited_by
+            }
+            self.assertEqual(
+                coroutine_stacks,
+                {
+                    "Task-1": [
+                        (
+                            tuple([staggered.__file__, ANY, "staggered_race"]),
+                            tuple([script_name, 21, "main"]),
+                        )
+                    ],
+                    "Task-2": [
+                        (
+                            tuple([script_name, 11, "deep"]),
+                            tuple([script_name, 15, "c1"]),
+                            tuple(
+                                [
+                                    staggered.__file__,
+                                    ANY,
+                                    "staggered_race.<locals>.run_one_coro",
+                                ]
+                            ),
+                        )
+                    ],
+                },
+            )
+
+            # Now ensure the coroutine stacks for the awaited_by relationships are correct.
+            awaited_by_coroutine_stacks = {
+                task.task_name: sorted(
+                    (
+                        id_to_task[coro.task_name].task_name,
+                        tuple(tuple(frame) for frame in coro.call_stack),
+                    )
+                    for coro in task.awaited_by
+                )
+                for task in stack_trace[0].awaited_by
+            }
+            self.assertEqual(
+                awaited_by_coroutine_stacks,
+                {
+                    "Task-1": [],
+                    "Task-2": [
+                        (
+                            "Task-1",
+                            (
+                                tuple(
+                                    [staggered.__file__, ANY, "staggered_race"]
+                                ),
+                                tuple([script_name, 21, "main"]),
+                            ),
+                        )
+                    ],
+                },
+            )
 
         @skip_if_not_supported
        @unittest.skipIf(
@@ -973,7 +1207,10 @@ class TestGetStackTrace(unittest.TestCase):
                 if not stack:
                     continue
                 current_frame = stack[0]
-                if current_frame.funcname == "main_work" and current_frame.lineno >15:
+                if (
+                    current_frame.funcname == "main_work"
+                    and current_frame.lineno > 15
+                ):
                     found = True
 
             if found:
@@ -981,7 +1218,9 @@ class TestGetStackTrace(unittest.TestCase):
             # Give a bit of time to take the next sample
             time.sleep(0.1)
         else:
-            self.fail("Main thread did not start its busy work on time")
+            self.fail(
+                "Main thread did not start its busy work on time"
+            )
 
         # Get stack trace with only GIL holder
         unwinder_gil = RemoteUnwinder(p.pid, only_active_thread=True)
@@ -999,16 +1238,23 @@ class TestGetStackTrace(unittest.TestCase):
         p.wait(timeout=SHORT_TIMEOUT)
 
         # Verify we got multiple threads in all_traces
-        self.assertGreater(len(all_traces), 1, "Should have multiple threads")
+        self.assertGreater(
+            len(all_traces), 1, "Should have multiple threads"
+        )
 
         # Verify we got exactly one thread in gil_traces
-        self.assertEqual(len(gil_traces), 1, "Should have exactly one GIL holder")
+        self.assertEqual(
+            len(gil_traces), 1, "Should have exactly one GIL holder"
+        )
 
         # The GIL holder should be in the all_traces list
         gil_thread_id = gil_traces[0][0]
         all_thread_ids = [trace[0] for trace in all_traces]
-        self.assertIn(gil_thread_id, all_thread_ids,
-                      "GIL holder should be among all threads")
+        self.assertIn(
+            gil_thread_id,
+            all_thread_ids,
+            "GIL holder should be among all threads",
+        )
 
 
 if __name__ == "__main__":
@@ -0,0 +1,2 @@
+Fix a bug that was causing the ``get_async_stack_trace`` function to miss
+some frames in the stack trace.
File diff suppressed because it is too large.

Modules/clinic/_remote_debugging_module.c.h (generated): 47 lines changed
@@ -235,26 +235,41 @@ PyDoc_STRVAR(_remote_debugging_RemoteUnwinder_get_async_stack_trace__doc__,
 "get_async_stack_trace($self, /)\n"
 "--\n"
 "\n"
-"Returns information about the currently running async task and its stack trace.\n"
-"\n"
-"Returns a tuple of (task_info, stack_frames) where:\n"
-"- task_info is a tuple of (task_id, task_name) identifying the task\n"
-"- stack_frames is a list of tuples (function_name, filename, line_number) representing\n"
-"  the Python stack frames for the task, ordered from most recent to oldest\n"
-"\n"
-"Example:\n"
-"    ((4345585712, \'Task-1\'), [\n"
-"        (\'run_echo_server\', \'server.py\', 127),\n"
-"        (\'serve_forever\', \'server.py\', 45),\n"
-"        (\'main\', \'app.py\', 23)\n"
-"    ])\n"
+"Get the currently running async tasks and their dependency graphs from the remote process.\n"
+"\n"
+"This returns information about running tasks and all tasks that are waiting for them,\n"
+"forming a complete dependency graph for each thread\'s active task.\n"
+"\n"
+"For each thread with a running task, returns the running task plus all tasks that\n"
+"transitively depend on it (tasks waiting for the running task, tasks waiting for\n"
+"those tasks, etc.).\n"
+"\n"
+"Returns a list of per-thread results, where each thread result contains:\n"
+"- Thread ID\n"
+"- List of task information for the running task and all its waiters\n"
+"\n"
+"Each task info contains:\n"
+"- Task ID (memory address)\n"
+"- Task name\n"
+"- Call stack frames: List of (func_name, filename, lineno)\n"
+"- List of tasks waiting for this task (recursive structure)\n"
 "\n"
 "Raises:\n"
 "    RuntimeError: If AsyncioDebug section is not available in the target process\n"
 "    RuntimeError: If there is an error copying memory from the target process\n"
 "    OSError: If there is an error accessing the target process\n"
 "    PermissionError: If access to the target process is denied\n"
-"    UnicodeDecodeError: If there is an error decoding strings from the target process");
+"    UnicodeDecodeError: If there is an error decoding strings from the target process\n"
+"    MemoryError: If memory allocation fails\n"
+"    OSError: If reading from the remote process fails\n"
+"\n"
+"Example output (similar structure to get_all_awaited_by but only for running tasks):\n"
+"[\n"
+"    (140234, [\n"
+"        (4345585712, \'main_task\',\n"
+"         [(\"run_server\", \"server.py\", 127), (\"main\", \"app.py\", 23)],\n"
+"         [\n"
+"             (4345585800, \'worker_1\', [...], [...]),\n"
+"             (4345585900, \'worker_2\', [...], [...])\n"
+"         ])\n"
+"    ])\n"
+"]");
 
 #define _REMOTE_DEBUGGING_REMOTEUNWINDER_GET_ASYNC_STACK_TRACE_METHODDEF    \
     {"get_async_stack_trace", (PyCFunction)_remote_debugging_RemoteUnwinder_get_async_stack_trace, METH_NOARGS, _remote_debugging_RemoteUnwinder_get_async_stack_trace__doc__},
@@ -273,4 +288,4 @@ _remote_debugging_RemoteUnwinder_get_async_stack_trace(PyObject *self, PyObject
 
     return return_value;
 }
-/*[clinic end generated code: output=a37ab223d5081b16 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=0dd1e6e8bab2a8b1 input=a9049054013a1b77]*/
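The updated docstring above describes the nested, recursive result informally and ends with a tuple-style example. Purely as an illustration of that example's shape, a recursive printer might look like the sketch below; the (task_id, task_name, frames, waiters) unpacking mirrors the docstring's example output and is an assumption rather than a documented contract.

# Hypothetical helper for the docstring's example output shape:
# [(thread_id, [(task_id, task_name, frames, waiters), ...]), ...]
def print_task(task, indent=1):
    task_id, task_name, frames, waiters = task
    pad = "  " * indent
    print(f"{pad}{task_name} ({task_id:#x})")
    # Frames are documented as (func_name, filename, lineno).
    for func_name, filename, lineno in frames:
        print(f"{pad}  {func_name} ({filename}:{lineno})")
    # Waiters recursively use the same task shape.
    for waiter in waiters:
        print_task(waiter, indent + 1)

def print_async_stacks(results):
    for thread_id, tasks in results:
        print(f"thread {thread_id}")
        for task in tasks:
            print_task(task)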