Mirror of https://github.com/microsoft/debugpy.git (synced 2025-12-23 08:48:12 +00:00)
Keep protocol correct on a fork after a subprocess. Fixes #264
This commit is contained in:
parent 9cda8e0515
commit fb7926fd80

4 changed files with 107 additions and 1 deletion
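
The gist of the fix: pydevd reads the wire protocol in use before calling the real os.fork and re-applies it as the child's default, so a process that was launched as a subprocess and then forks keeps speaking the protocol its session negotiated instead of falling back to the stock default. A minimal sketch of that capture-and-restore pattern in plain Python (the names below are illustrative stand-ins, not debugpy's actual API):

    import os

    # Illustrative stand-ins for pydevd's globals -- not the real API.
    _protocol_in_use = 'http_json'    # what get_protocol() would report
    DEFAULT_PROTOCOL = 'quoted-line'  # stale default a fresh child would pick up


    def reinitialize_tracing():
        # A freshly forked child sets tracing up again from the defaults, so
        # DEFAULT_PROTOCOL must already match the parent's protocol by now.
        print('pid %s re-initialized with protocol %r'
              % (os.getpid(), DEFAULT_PROTOCOL))


    protocol = _protocol_in_use  # capture *before* forking
    pid = os.fork()              # POSIX-only, like the patched code path
    if pid == 0:
        DEFAULT_PROTOCOL = protocol  # restore in the child first...
        reinitialize_tracing()       # ...then re-initialize tracing
        os._exit(0)
    else:
        os.waitpid(pid, 0)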
@@ -8,6 +8,7 @@ from _pydevd_bundle.pydevd_constants import get_global_debugger, IS_WINDOWS, IS_
 from _pydev_bundle import pydev_log
 from contextlib import contextmanager
+from _pydevd_bundle import pydevd_constants
 from _pydevd_bundle.pydevd_defaults import PydevdCustomization

 try:
     xrange
@@ -676,9 +677,12 @@ def create_fork(original_name):
             frame = frame.f_back
         frame = None  # Just make sure we don't hold on to it.

+        protocol = pydevd_constants.get_protocol()
+
         child_process = getattr(os, original_name)()  # fork
         if not child_process:
             if is_new_python_process:
+                PydevdCustomization.DEFAULT_PROTOCOL = protocol
                 _on_forked_process(setup_tracing=apply_arg_patch and not is_subprocess_fork)
         else:
             if is_new_python_process:
@@ -129,7 +129,7 @@ class NetCommand(_BaseNetCommand):
             cls._showing_debug_info += 1
             try:
-                out_message = 'sending cmd --> '
+                out_message = 'sending cmd (%s) --> ' % (get_protocol(),)
                 out_message += "%20s" % ID_TO_MEANING.get(str(cmd_id), 'UNKNOWN')
                 out_message += ' '
                 out_message += text.replace('\n', ' ')
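
The logging tweak above is diagnostic support for the same bug: each traced command now carries the active protocol, which makes it visible when a forked child is speaking the wrong one. Assuming one of pydevd's usual protocol names (illustrative output, not captured from a real run), a traced line now looks roughly like:

    sending cmd (http_json) -->    CMD_THREAD_CREATE <xml ...>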
@@ -0,0 +1,28 @@
+import os
+import sys
+import subprocess
+
+
+def breaknow():
+    print('break here')
+
+
+if '--fork-in-subprocess' in sys.argv:
+    if sys.platform == 'win32':
+        popen = subprocess.Popen([sys.executable, __file__, '--forked'])
+        pid = popen.pid
+    else:
+        pid = os.fork()
+    print('currently in pid: %s, ppid: %s' % (os.getpid(), os.getppid()))
+    print('os.fork returned', pid)
+    breaknow()
+
+elif '--forked' in sys.argv:
+    print('currently in pid: %s, ppid: %s' % (os.getpid(), os.getppid()))
+    breaknow()
+
+elif '--fork-in-subprocess' not in sys.argv:
+    out = subprocess.check_output([sys.executable, __file__, '--fork-in-subprocess'])
+    breaknow()
+    print('\n\nin pid %s, output from subprocess.run:\n%s' % (os.getpid(), out.decode('utf-8')))
+    print('TEST SUCEEDED!')
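
Run with no arguments, this new test script builds a three-level process tree, and the breakpoint in breaknow() must be hit at every level (the sketch below summarizes the control flow; it is not script output):

    python _debugger_case_subprocess_and_fork.py      # level 0: subprocess.check_output(...), then breaknow()
    └── python ... --fork-in-subprocess               # level 1: os.fork() (or spawns --forked on Windows)
        └── forked child / '--forked' process         # level 2: must keep the negotiated protocol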
@@ -4047,6 +4047,80 @@ def test_subprocess_pydevd_customization(case_setup_remote, command_line_args):
     writer.finished_ok = True


+@pytest.mark.skipif(IS_PY26, reason='Only Python 2.7 onwards.')
+def test_subprocess_then_fork(case_setup_multiprocessing):
+    import threading
+    from tests_python.debugger_unittest import AbstractWriterThread
+
+    with case_setup_multiprocessing.test_file('_debugger_case_subprocess_and_fork.py') as writer:
+        json_facade = JsonFacade(writer)
+        json_facade.write_launch(justMyCode=False)
+
+        break_line = writer.get_line_index_with_content('break here')
+        json_facade.write_set_breakpoints([break_line])
+
+        server_socket = writer.server_socket
+
+        class SecondaryProcessWriterThread(AbstractWriterThread):
+
+            TEST_FILE = writer.get_main_filename()
+            _sequence = -1
+
+        class SecondaryProcessThreadCommunication(threading.Thread):
+
+            def run(self):
+                from tests_python.debugger_unittest import ReaderThread
+
+                # Note that we accept 2 connections and then we proceed to receive the breakpoints.
+                json_facades = []
+                for i in range(2):
+                    server_socket.listen(1)
+                    self.server_socket = server_socket
+                    writer.log.append(' *** Multiprocess %s waiting on server_socket.accept()' % (i,))
+                    new_sock, addr = server_socket.accept()
+                    writer.log.append(' *** Multiprocess %s completed server_socket.accept()' % (i,))
+
+                    reader_thread = ReaderThread(new_sock)
+                    reader_thread.name = ' *** Multiprocess %s Reader Thread' % i
+                    reader_thread.start()
+                    writer.log.append(' *** Multiprocess %s started ReaderThread' % (i,))
+
+                    writer2 = SecondaryProcessWriterThread()
+                    writer2._WRITE_LOG_PREFIX = ' *** Multiprocess %s write: ' % i
+                    writer2.reader_thread = reader_thread
+                    writer2.sock = new_sock
+                    json_facade2 = JsonFacade(writer2, send_json_startup_messages=False)
+                    json_facade2.writer.write_multi_threads_single_notification(True)
+                    writer.log.append(' *** Multiprocess %s write attachThread' % (i,))
+                    json_facade2.write_attach(justMyCode=False)
+
+                    writer.log.append(' *** Multiprocess %s write set breakpoints' % (i,))
+                    json_facade2.write_set_breakpoints([break_line])
+                    writer.log.append(' *** Multiprocess %s write make initial run' % (i,))
+                    json_facade2.write_make_initial_run()
+                    json_facades.append(json_facade2)
+
+                for i, json_facade3 in enumerate(json_facades):
+                    writer.log.append(' *** Multiprocess %s wait for thread stopped' % (i,))
+                    json_facade3.wait_for_thread_stopped(line=break_line)
+                    writer.log.append(' *** Multiprocess %s continue' % (i,))
+                    json_facade3.write_continue()
+
+        secondary_process_thread_communication = SecondaryProcessThreadCommunication()
+        secondary_process_thread_communication.start()
+        time.sleep(.1)
+        json_facade.write_make_initial_run()
+
+        secondary_process_thread_communication.join(20)
+        if secondary_process_thread_communication.is_alive():
+            raise AssertionError('The SecondaryProcessThreadCommunication did not finish')
+
+        json_facade.wait_for_thread_stopped(line=break_line)
+        json_facade.write_continue()
+
+        writer.finished_ok = True
+
+
 @pytest.mark.parametrize('apply_multiprocessing_patch', [True, False])
 def test_no_subprocess_patching(case_setup_multiprocessing, apply_multiprocessing_patch):
     import threading
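
One harness detail worth noting: a single listening socket accepts both secondary debugger connections (the subprocess and its forked child), and each accepted connection gets its own reader thread before breakpoints are re-sent; only then does the test wait for the two stop events. A stripped-down sketch of that accept loop using just the standard library (names are illustrative, not the test-harness API):

    import socket
    import threading


    def accept_two_debugger_connections(server):
        # 'server' is an already-bound listening socket. Mirror the test's
        # SecondaryProcessThreadCommunication: accept each client, then start
        # a reader thread per connection (a real reader loops; this sketch
        # does a single recv just to show the shape).
        clients = []
        for i in range(2):
            server.listen(1)
            conn, _addr = server.accept()
            reader = threading.Thread(target=conn.recv, args=(1024,),
                                      name='reader-%s' % i)
            reader.daemon = True
            reader.start()
            clients.append((conn, reader))
        return clients

Accepting both connections up front avoids an obvious deadlock risk: if the harness blocked waiting on the first child's breakpoint before accepting the second connection, the second process could hang while trying to attach.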