Fix issue with quoting args, and only send the 'continued' event when execution has actually continued. Fixes #1261

This commit is contained in:
fabioz 2019-03-23 10:03:34 -03:00 committed by Fabio Zadrozny
parent 9c7c4615fd
commit da47b051b8
8 changed files with 164 additions and 15 deletions

View file

@ -23,6 +23,7 @@ def log_debug(msg):
def log_error_once(msg):
    # Delegate to pydev_log so that the same error message is only
    # reported a single time (avoids spamming the log on repeated calls).
    pydev_log.error_once(msg)
pydev_src_dir = os.path.dirname(os.path.dirname(__file__))
@ -80,8 +81,9 @@ def remove_quotes_from_args(args):
if sys.platform == "win32":
new_args = []
for x in args:
if len(x) > 1 and x.startswith('"') and x.endswith('"'):
x = x[1:-1]
if x != '""':
if len(x) > 1 and x.startswith('"') and x.endswith('"'):
x = x[1:-1]
new_args.append(x)
return new_args
else:
@ -146,7 +148,7 @@ def get_c_option_index(args):
def patch_args(args):
try:
log_debug("Patching args: %s"% str(args))
log_debug("Patching args: %s" % str(args))
args = remove_quotes_from_args(args)
from pydevd import SetupHolder
@ -321,6 +323,7 @@ def patch_arg_str_win(arg_str):
log_debug("New args: %s" % arg_str)
return arg_str
def monkey_patch_module(module, funcname, create_func):
if hasattr(module, funcname):
original_name = 'original_' + funcname
@ -340,6 +343,7 @@ def warn_multiproc():
# "pydev debugger: To debug that process please enable 'Attach to subprocess automatically while debugging?' option in the debugger settings.\n")
#
def create_warn_multiproc(original_name):
def new_warn_multiproc(*args):
@ -348,9 +352,12 @@ def create_warn_multiproc(original_name):
warn_multiproc()
return getattr(os, original_name)(*args)
return new_warn_multiproc
def create_execl(original_name):
def new_execl(path, *args):
"""
os.execl(path, arg0, arg1, ...)
@ -362,10 +369,12 @@ def create_execl(original_name):
args = patch_args(args)
send_process_created_message()
return getattr(os, original_name)(path, *args)
return new_execl
def create_execv(original_name):
def new_execv(path, args):
"""
os.execv(path, args)
@ -374,6 +383,7 @@ def create_execv(original_name):
import os
send_process_created_message()
return getattr(os, original_name)(path, patch_args(args))
return new_execv
@ -382,14 +392,17 @@ def create_execve(original_name):
os.execve(path, args, env)
os.execvpe(file, args, env)
"""
def new_execve(path, args, env):
import os
send_process_created_message()
return getattr(os, original_name)(path, patch_args(args), env)
return new_execve
def create_spawnl(original_name):
def new_spawnl(mode, path, *args):
"""
os.spawnl(mode, path, arg0, arg1, ...)
@ -399,10 +412,12 @@ def create_spawnl(original_name):
args = patch_args(args)
send_process_created_message()
return getattr(os, original_name)(mode, path, *args)
return new_spawnl
def create_spawnv(original_name):
def new_spawnv(mode, path, args):
"""
os.spawnv(mode, path, args)
@ -411,6 +426,7 @@ def create_spawnv(original_name):
import os
send_process_created_message()
return getattr(os, original_name)(mode, path, patch_args(args))
return new_spawnv
@ -419,10 +435,12 @@ def create_spawnve(original_name):
os.spawnve(mode, path, args, env)
os.spawnvpe(mode, file, args, env)
"""
def new_spawnve(mode, path, args, env):
import os
send_process_created_message()
return getattr(os, original_name)(mode, path, patch_args(args), env)
return new_spawnve
@ -430,11 +448,13 @@ def create_fork_exec(original_name):
"""
_posixsubprocess.fork_exec(args, executable_list, close_fds, ... (13 more))
"""
def new_fork_exec(args, *other_args):
import _posixsubprocess # @UnresolvedImport
args = patch_args(args)
send_process_created_message()
return getattr(_posixsubprocess, original_name)(args, *other_args)
return new_fork_exec
@ -442,6 +462,7 @@ def create_warn_fork_exec(original_name):
"""
_posixsubprocess.fork_exec(args, executable_list, close_fds, ... (13 more))
"""
def new_warn_fork_exec(*args):
try:
import _posixsubprocess
@ -449,6 +470,7 @@ def create_warn_fork_exec(original_name):
return getattr(_posixsubprocess, original_name)(*args)
except:
pass
return new_warn_fork_exec
@ -456,6 +478,7 @@ def create_CreateProcess(original_name):
"""
CreateProcess(*args, **kwargs)
"""
def new_CreateProcess(app_name, cmd_line, *args):
try:
import _subprocess
@ -463,6 +486,7 @@ def create_CreateProcess(original_name):
import _winapi as _subprocess
send_process_created_message()
return getattr(_subprocess, original_name)(app_name, patch_arg_str_win(cmd_line), *args)
return new_CreateProcess
@ -470,6 +494,7 @@ def create_CreateProcessWarnMultiproc(original_name):
"""
CreateProcess(*args, **kwargs)
"""
def new_CreateProcess(*args):
try:
import _subprocess
@ -477,10 +502,12 @@ def create_CreateProcessWarnMultiproc(original_name):
import _winapi as _subprocess
warn_multiproc()
return getattr(_subprocess, original_name)(*args)
return new_CreateProcess
def create_fork(original_name):
def new_fork():
import os
@ -511,6 +538,7 @@ def create_fork(original_name):
if is_new_python_process:
send_process_created_message()
return child_process
return new_fork
@ -662,6 +690,7 @@ class _NewThreadStartupWithoutTrace:
def __call__(self):
return self.original_func(*self.args, **self.kwargs)
_UseNewThreadStartup = _NewThreadStartupWithTrace
@ -677,6 +706,7 @@ def _get_threading_modules_to_patch():
return threading_modules_to_patch
threading_modules_to_patch = _get_threading_modules_to_patch()

View file

@ -206,11 +206,18 @@ class _PyDevJsonCommandProcessor(object):
arguments = request.arguments # : :type arguments: ContinueArguments
thread_id = arguments.threadId
self.api.request_resume_thread(thread_id)
def on_resumed():
body = {'allThreadsContinued': thread_id == '*'}
response = pydevd_base_schema.build_response(request, kwargs={'body': body})
cmd = NetCommand(CMD_RETURN, 0, response.to_dict(), is_json=True)
py_db.writer.add_command(cmd)
body = {'allThreadsContinued': thread_id == '*'}
response = pydevd_base_schema.build_response(request, kwargs={'body': body})
return NetCommand(CMD_RETURN, 0, response.to_dict(), is_json=True)
# Only send the resumed notification when the target has actually resumed!
# (Otherwise the user could send a continue, receive the notification and then
# request a new pause; that pause would then suspend without sending any
# notification, since execution never really resumed in the first place.)
py_db.threads_suspended_single_notification.add_on_resumed_callback(on_resumed)
self.api.request_resume_thread(thread_id)
def on_next_request(self, py_db, request):
'''

View file

@ -306,13 +306,19 @@ class AbstractSingleNotificationBehavior(object):
class ThreadsSuspendedSingleNotification(AbstractSingleNotificationBehavior):
__slots__ = AbstractSingleNotificationBehavior.__slots__ + [
'multi_threads_single_notification', '_py_db']
'multi_threads_single_notification', '_py_db', '_callbacks', '_callbacks_lock']
def __init__(self, py_db):
AbstractSingleNotificationBehavior.__init__(self)
# If True, pydevd will send a single notification when all threads are suspended/resumed.
self.multi_threads_single_notification = False
self._py_db = weakref.ref(py_db)
self._callbacks_lock = threading.Lock()
self._callbacks = []
def add_on_resumed_callback(self, callback):
    # Register a callback to be invoked when threads are actually resumed
    # (callbacks are consumed in send_resume_notification, so each one fires
    # at most once). The lock guards the list, as callbacks may be added
    # from a different thread than the one draining them.
    with self._callbacks_lock:
        self._callbacks.append(callback)
@overrides(AbstractSingleNotificationBehavior.send_resume_notification)
def send_resume_notification(self, thread_id):
@ -320,6 +326,13 @@ class ThreadsSuspendedSingleNotification(AbstractSingleNotificationBehavior):
if py_db is not None:
py_db.writer.add_command(py_db.cmd_factory.make_thread_resume_single_notification(thread_id))
with self._callbacks_lock:
callbacks = self._callbacks
self._callbacks = []
for callback in callbacks:
callback()
@overrides(AbstractSingleNotificationBehavior.send_suspend_notification)
def send_suspend_notification(self, thread_id, stop_reason):
py_db = self._py_db()
@ -636,6 +649,10 @@ class PyDB(object):
def multi_threads_single_notification(self, notify):
self._threads_suspended_single_notification.multi_threads_single_notification = notify
@property
def threads_suspended_single_notification(self):
    # Read-only accessor for the suspended-threads notification helper, so
    # clients can register on-resumed callbacks without touching the
    # private attribute directly.
    return self._threads_suspended_single_notification
def get_plugin_lazy_init(self):
if self.plugin is None and SUPPORT_PLUGINS:
self.plugin = PluginManager(self)

View file

@ -205,9 +205,15 @@ class ReaderThread(threading.Thread):
frame = sys._getframe().f_back.f_back
frame_info = ''
while frame:
if frame.f_code.co_name in (
'wait_for_message', 'accept_json_message', 'wait_for_json_message', 'wait_for_response'):
frame = frame.f_back
continue
if frame.f_code.co_filename.endswith('debugger_unittest.py'):
frame = frame.f_back
continue
stack_msg = ' -- File "%s", line %s, in %s\n' % (frame.f_code.co_filename, frame.f_lineno, frame.f_code.co_name)
if 'run' == frame.f_code.co_name:
frame_info = stack_msg # Ok, found the writer thread 'run' method (show only that).

View file

@ -0,0 +1,26 @@
import subprocess
import sys

# Tricky argument values (embedded quotes, backslashes, empty strings) used to
# verify that the debugger's subprocess monkey-patching re-quotes command-line
# arguments correctly when re-spawning -- presumably exercising
# pydev_monkey.patch_args / remove_quotes_from_args; confirm against the
# debugger tests that launch this file.
args = [
    'connect(\\"127 . 0.0.1\\")',
    '"',
    '',
    '\\',
    '\\"',
    '""',
]


def main():
    # Parent invocation: re-run this same script in a subprocess, forwarding
    # the tricky args; the debugger should patch the command line transparently.
    retcode = subprocess.call([sys.executable, __file__] + args)
    assert retcode == 0


if __name__ == '__main__':
    sys_args = sys.argv[1:]
    if sys_args:
        # Child invocation: the args must have round-tripped unchanged through
        # the debugger's argument patching.
        assert sys_args == args, 'Expected that %r == %r' % (sys_args, args)
        print('break here')
        print('TEST SUCEEDED!')
    else:
        main()

View file

@ -2320,6 +2320,67 @@ def test_multiprocessing_with_stopped_breakpoints(case_setup_multiprocessing):
writer.finished_ok = True
@pytest.mark.skipif(not IS_CPYTHON, reason='CPython only test.')
def test_subprocess_quoted_args(case_setup_multiprocessing):
    # Checks that a subprocess launched with tricky (quoted/backslashed) args
    # is still debugged properly: the secondary connection must be accepted
    # and the breakpoint in the child must be hit.
    import threading
    from tests_python.debugger_unittest import AbstractWriterThread
    with case_setup_multiprocessing.test_file('_debugger_case_quoting.py') as writer:
        break_subprocess_line = writer.get_line_index_with_content('break here')
        writer.write_add_breakpoint(break_subprocess_line)
        server_socket = writer.server_socket

        class SecondaryProcessWriterThread(AbstractWriterThread):
            # Writer bound to the secondary (subprocess) debugger connection.
            TEST_FILE = writer.get_main_filename()
            _sequence = -1

        class SecondaryProcessThreadCommunication(threading.Thread):

            def run(self):
                from tests_python.debugger_unittest import ReaderThread
                # Note: on linux on Python 2, CPython's subprocess.call will
                # actually create a fork first (at which point it'll connect)
                # and then, later on, it'll call the main (as if it was a clean
                # process, as if PyDB wasn't created the first time) -- the
                # debugger will still work, but it'll do an additional
                # connection.
                expected_connections = 1
                if sys.platform != 'win32' and IS_PY2:
                    expected_connections = 2
                for _ in range(expected_connections):
                    server_socket.listen(1)
                    self.server_socket = server_socket
                    new_sock, addr = server_socket.accept()
                    reader_thread = ReaderThread(new_sock)
                    reader_thread.name = ' *** Multiprocess Reader Thread'
                    reader_thread.start()
                    writer2 = SecondaryProcessWriterThread()
                    writer2.reader_thread = reader_thread
                    writer2.sock = new_sock
                    writer2.write_version()
                    writer2.write_add_breakpoint(break_subprocess_line)
                    writer2.write_make_initial_run()
                    # Drive the subprocess to the breakpoint and resume it.
                    hit = writer2.wait_for_breakpoint_hit()
                    writer2.write_run_thread(hit.thread_id)

        secondary_process_thread_communication = SecondaryProcessThreadCommunication()
        secondary_process_thread_communication.start()
        writer.write_make_initial_run()
        # Bounded join so a wedged secondary connection fails the test instead
        # of hanging it forever.
        secondary_process_thread_communication.join(10)
        if secondary_process_thread_communication.isAlive():
            raise AssertionError('The SecondaryProcessThreadCommunication did not finish')
        writer.finished_ok = True
@pytest.mark.skipif(not IS_CPYTHON, reason='CPython only test.')
def test_remote_debugger_basic(case_setup_remote):
with case_setup_remote.test_file('_debugger_case_remote.py') as writer:

View file

@ -825,6 +825,7 @@ def test_pause_and_continue(case_setup):
with case_setup.test_file('_debugger_case_pause_continue.py') as writer:
json_facade = JsonFacade(writer)
writer.write_multi_threads_single_notification(True)
writer.write_set_protocol('http_json')
writer.write_add_breakpoint(writer.get_line_index_with_content('Break here'))
@ -939,6 +940,7 @@ def test_stepping(case_setup):
writer.finished_ok = True
def test_evaluate(case_setup):
with case_setup.test_file('_debugger_case_evaluate.py') as writer:
json_facade = JsonFacade(writer)
@ -982,7 +984,6 @@ def test_evaluate(case_setup):
writer.finished_ok = True
@pytest.mark.skipif(IS_JYTHON, reason='Flaky on Jython.')
def test_path_translation_and_source_reference(case_setup):
@ -1057,7 +1058,6 @@ def test_path_translation_and_source_reference(case_setup):
writer.finished_ok = True
@pytest.mark.skipif(IS_JYTHON, reason='Flaky on Jython.')
def test_source_reference_no_file(case_setup, tmpdir):
@ -1281,3 +1281,4 @@ def test_redirect_output(case_setup):
if __name__ == '__main__':
pytest.main(['-k', 'test_case_skipping_filters', '-s'])

View file

@ -32,7 +32,7 @@ class TestCase(unittest.TestCase):
if sys.platform == "win32":
debug_command = debug_command.replace('"', '\\"')
debug_command = '"%s"' % debug_command
self.assertEqual(
'C:\\bin\\python.exe -u -c %s' % debug_command,
pydev_monkey.patch_arg_str_win(check))
@ -96,7 +96,7 @@ class TestCase(unittest.TestCase):
try:
SetupHolder.setup = {'client': '127.0.0.1', 'port': '0'}
check = ['C:\\bin\\python.exe', 'connect(\\"127.0.0.1\\")']
check = ['C:\\bin\\python.exe', 'connect(\\"127.0.0.1\\")', 'with spaces']
from _pydevd_bundle.pydevd_command_line_handling import get_pydevd_file
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe',
@ -106,7 +106,9 @@ class TestCase(unittest.TestCase):
'--client',
'127.0.0.1',
'--file',
'connect(\\"127.0.0.1\\")'])
'"connect(\\\\\\"127.0.0.1\\\\\\")"' if sys.platform == 'win32' else 'connect(\\"127.0.0.1\\")',
'"with spaces"' if sys.platform == 'win32' else 'with spaces',
])
finally:
SetupHolder.setup = original
@ -129,7 +131,6 @@ class TestCase(unittest.TestCase):
try:
SetupHolder.setup = {'client': '127.0.0.1', 'port': '0'}
check = ['C:\\bin\\python.exe', 'target.py', 'connect(\\"127.0.0.1\\")', 'bar']
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe',
get_pydevd_file(),
@ -139,7 +140,7 @@ class TestCase(unittest.TestCase):
'127.0.0.1',
'--file',
'target.py',
'connect(\\"127.0.0.1\\")',
'"connect(\\\\\\"127.0.0.1\\\\\\")"' if sys.platform == 'win32' else 'connect(\\"127.0.0.1\\")',
'bar',
])
finally: