Mirror of https://github.com/microsoft/debugpy.git (synced 2025-12-23 08:48:12 +00:00)

Parent: 8f358d6e0f
Commit: 679bda4745

10 changed files with 249 additions and 261 deletions
@@ -33,7 +33,9 @@ def start_django(run):
if multiprocess:
pytest.skip("https://github.com/microsoft/ptvsd/issues/1706")

# No clean way to kill Django server, expect non-zero exit code
session.expected_exit_code = some.int

session.config.update({"django": True, "subProcess": bool(multiprocess)})

args = ["runserver"]

@@ -184,6 +186,9 @@ def test_django_breakpoint_multiproc(start_django):

child_pid = parent_session.wait_for_next_subprocess()
with debug.Session() as child_session:
# TODO: this is wrong, but we don't have multiproc attach
# yet, so update this when that is done
# https://github.com/microsoft/ptvsd/issues/1776
with child_session.attach_by_pid(child_pid):
child_session.set_breakpoints(paths.app_py, [bp_line])

@@ -4,13 +4,14 @@
from __future__ import absolute_import, print_function, unicode_literals

import pytest
import sys

from tests import debug
from tests.patterns import some


def test_variables_and_evaluate(pyfile, start_method, run_as):
def test_variables_and_evaluate(pyfile, target, run):
@pyfile
def code_to_debug():
import debug_me # noqa

@@ -20,10 +21,10 @@ def test_variables_and_evaluate(pyfile, start_method, run_as):
c = 3
print([a, b, c]) # @bp

with debug.Session(start_method) as session:
session.configure(run_as, code_to_debug)
session.set_breakpoints(code_to_debug, [code_to_debug.lines["bp"]])
session.start_debugging()
with debug.Session() as session:
with run(session, target(code_to_debug)):
session.set_breakpoints(code_to_debug, all)

hit = session.wait_for_stop()

resp_scopes = session.send_request(

@@ -102,7 +103,7 @@ def test_variables_and_evaluate(pyfile, start_method, run_as):
session.request_continue()


def test_set_variable(pyfile, start_method, run_as):
def test_set_variable(pyfile, target, run):
@pyfile
def code_to_debug():
from debug_me import backchannel, ptvsd

@@ -111,10 +112,11 @@ def test_set_variable(pyfile, start_method, run_as):
ptvsd.break_into_debugger()
backchannel.send(a)

with debug.Session(start_method, backchannel=True) as session:
backchannel = session.backchannel
session.configure(run_as, code_to_debug)
session.start_debugging()
with debug.Session() as session:
backchannel = session.open_backchannel()
with run(session, target(code_to_debug)):
pass

hit = session.wait_for_stop()

resp_scopes = session.send_request(

@@ -156,7 +158,7 @@ def test_set_variable(pyfile, start_method, run_as):
assert backchannel.receive() == 1000


def test_variable_sort(pyfile, start_method, run_as):
def test_variable_sort(pyfile, target, run):
@pyfile
def code_to_debug():
import debug_me # noqa

@@ -176,10 +178,9 @@ def test_variable_sort(pyfile, start_method, run_as):
d = 3 # noqa
print("done") # @bp

with debug.Session(start_method) as session:
session.configure(run_as, code_to_debug)
session.set_breakpoints(code_to_debug, [code_to_debug.lines["bp"]])
session.start_debugging()
with debug.Session() as session:
with run(session, target(code_to_debug)):
session.set_breakpoints(code_to_debug, all)
hit = session.wait_for_stop()

resp_scopes = session.send_request(

@@ -244,7 +245,8 @@ def test_variable_sort(pyfile, start_method, run_as):
session.request_continue()


def test_return_values(pyfile, start_method, run_as):
@pytest.mark.parametrize("retval", ("show", ""))
def test_return_values(pyfile, target, run, retval):
@pyfile
def code_to_debug():
import debug_me # noqa

@@ -282,13 +284,11 @@ def test_return_values(pyfile, start_method, run_as):
}
)

with debug.Session(start_method) as session:
session.configure(
run_as, code_to_debug,
showReturnValue=True
)
session.set_breakpoints(code_to_debug, [code_to_debug.lines["bp"]])
session.start_debugging()
with debug.Session() as session:
session.config["showReturnValue"] = bool(retval)
with run(session, target(code_to_debug)):
session.set_breakpoints(code_to_debug, all)

hit = session.wait_for_stop()

session.send_request("next", {"threadId": hit.thread_id}).wait_for_response()

@@ -310,7 +310,10 @@ def test_return_values(pyfile, start_method, run_as):
if v["name"].startswith("(return)")
)

assert variables == [expected1]
if retval:
assert variables == [expected1]
else:
assert variables == []

session.send_request("next", {"threadId": hit.thread_id}).wait_for_response()
hit = session.wait_for_stop(reason="step")

@@ -326,12 +329,15 @@ def test_return_values(pyfile, start_method, run_as):
if v["name"].startswith("(return)")
)

assert variables == [expected1, expected2]
if retval:
assert variables == [expected1, expected2]
else:
assert variables == []

session.send_request("continue").wait_for_response()


def test_unicode(pyfile, start_method, run_as):
def test_unicode(pyfile, target, run):
# On Python 3, variable names can contain Unicode characters.
# On Python 2, they must be ASCII, but using a Unicode character in an expression should not crash debugger.

@@ -345,9 +351,10 @@ def test_unicode(pyfile, start_method, run_as):
ptvsd.break_into_debugger()
print("break")

with debug.Session(start_method) as session:
session.configure(run_as, code_to_debug)
session.start_debugging()
with debug.Session() as session:
with run(session, target(code_to_debug)):
pass

hit = session.wait_for_stop()

resp_eval = session.send_request(

@@ -364,7 +371,7 @@ def test_unicode(pyfile, start_method, run_as):
session.request_continue()


def test_hex_numbers(pyfile, start_method, run_as):
def test_hex_numbers(pyfile, target, run):
@pyfile
def code_to_debug():
import debug_me # noqa

@@ -375,10 +382,9 @@ def test_hex_numbers(pyfile, start_method, run_as):
d = {(1, 10, 100): (10000, 100000, 100000)}
print((a, b, c, d)) # @bp

with debug.Session(start_method) as session:
session.configure(run_as, code_to_debug)
session.set_breakpoints(code_to_debug, [code_to_debug.lines["bp"]])
session.start_debugging()
with debug.Session() as session:
with run(session, target(code_to_debug)):
session.set_breakpoints(code_to_debug, all)
hit = session.wait_for_stop()

resp_scopes = session.send_request(

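Taken together, the hunks above show the shape of the migration: `debug.Session(start_method)` plus `session.configure(run_as, ...)` and `session.start_debugging()` become a bare `debug.Session()` whose launch or attach behaviour comes from the injected `run` and `target` fixtures, settings are written to `session.config`, and the backchannel is opened explicitly with `session.open_backchannel()`. A minimal sketch of the new pattern, assuming the test suite's `pyfile`, `target`, and `run` fixtures and its module-level `from tests import debug` import (the test name and debuggee body are illustrative, not taken from the diff):

    def test_example(pyfile, target, run):  # hypothetical name, for illustration
        @pyfile
        def code_to_debug():
            import debug_me  # noqa

            a = 1
            print(a)  # @bp

        with debug.Session() as session:
            # Debug properties now go into session.config instead of being
            # passed as keyword arguments to session.configure().
            session.config["showReturnValue"] = True

            # run() starts or attaches to the debuggee; requests that must be
            # sent before it resumes go inside the with-block.
            with run(session, target(code_to_debug)):
                session.set_breakpoints(code_to_debug, all)

            session.wait_for_stop()
            # ... inspect scopes and variables via session.send_request() ...
            session.request_continue()
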
@@ -16,7 +16,7 @@ str_matching_ArithmeticError = some.str.matching(r"(.+\.)?ArithmeticError")
@pytest.mark.parametrize("raised", ["raised", ""])
@pytest.mark.parametrize("uncaught", ["uncaught", ""])
def test_vsc_exception_options_raise_with_except(
pyfile, start_method, run_as, raised, uncaught
pyfile, target, run, raised, uncaught
):
@pyfile
def code_to_debug():

@@ -30,13 +30,12 @@ def test_vsc_exception_options_raise_with_except(

raise_with_except()

with debug.Session(start_method) as session:
with debug.Session() as session:
session.expected_exit_code = some.int
session.configure(run_as, code_to_debug)
session.request(
"setExceptionBreakpoints", {"filters": list({raised, uncaught} - {""})}
)
session.start_debugging()
with run(session, target(code_to_debug)):
session.request(
"setExceptionBreakpoints", {"filters": list({raised, uncaught} - {""})}
)

expected = some.dict.containing(
{

@@ -72,7 +71,7 @@ def test_vsc_exception_options_raise_with_except(
@pytest.mark.parametrize("raised", ["raised", ""])
@pytest.mark.parametrize("uncaught", ["uncaught", ""])
def test_vsc_exception_options_raise_without_except(
pyfile, start_method, run_as, raised, uncaught
pyfile, target, run, raised, uncaught
):
@pyfile
def code_to_debug():

@@ -83,15 +82,13 @@ def test_vsc_exception_options_raise_without_except(

raise_without_except()

with debug.Session(start_method) as session:
with debug.Session() as session:
session.ignore_unobserved.append(Event("stopped"))
session.expected_exit_code = some.int
session.configure(run_as, code_to_debug)
session.request(
"setExceptionBreakpoints", {"filters": list({raised, uncaught} - {""})}
)
session.start_debugging()

with run(session, target(code_to_debug)):
session.request(
"setExceptionBreakpoints", {"filters": list({raised, uncaught} - {""})}
)
expected_exc_info = some.dict.containing(
{
"exceptionId": str_matching_ArithmeticError,

@@ -149,7 +146,7 @@ def test_vsc_exception_options_raise_without_except(
@pytest.mark.parametrize("uncaught", ["uncaught", ""])
@pytest.mark.parametrize("zero", ["zero", ""])
@pytest.mark.parametrize("exit_code", [0, 1, "nan"])
def test_systemexit(pyfile, start_method, run_as, raised, uncaught, zero, exit_code):
def test_systemexit(pyfile, target, run, raised, uncaught, zero, exit_code):
@pyfile
def code_to_debug():
import debug_me # noqa

@@ -169,16 +166,12 @@ def test_systemexit(pyfile, start_method, run_as, raised, uncaught, zero, exit_c
if uncaught:
filters += ["uncaught"]

with debug.Session(start_method) as session:
with debug.Session() as session:
session.expected_exit_code = some.int
session.configure(
run_as,
code_to_debug,
args=[repr(exit_code)],
breakOnSystemExitZero=bool(zero),
)
session.request("setExceptionBreakpoints", {"filters": filters})
session.start_debugging()
session.config["args"] = [repr(exit_code)]
session.config["breakOnSystemExitZero"] = bool(zero)
with run(session, target(code_to_debug)):
session.request("setExceptionBreakpoints", {"filters": filters})

# When breaking on raised exceptions, we'll stop on both lines,
# unless it's SystemExit(0) and we asked to ignore that.

@@ -221,7 +214,7 @@ def test_systemexit(pyfile, start_method, run_as, raised, uncaught, zero, exit_c
[], # Add the whole Python Exceptions category.
],
)
def test_raise_exception_options(pyfile, start_method, run_as, exceptions, break_mode):
def test_raise_exception_options(pyfile, target, run, exceptions, break_mode):
if break_mode in ("never", "unhandled", "userUnhandled"):
expect_exceptions = []
if break_mode != "never" and (not exceptions or "AssertionError" in exceptions):

@@ -257,26 +250,26 @@ def test_raise_exception_options(pyfile, start_method, run_as, exceptions, break
except IndexError:
pass

with debug.Session(start_method) as session:
with debug.Session() as session:
session.ignore_unobserved.append(Event("stopped"))
session.expected_exit_code = some.int
session.configure(run_as, code_to_debug)
path = [{"names": ["Python Exceptions"]}]
if exceptions:
path.append({"names": exceptions})
session.request(
"setExceptionBreakpoints",
{
"filters": [], # Unused when exceptionOptions is passed.
"exceptionOptions": [
{
"path": path,
"breakMode": break_mode, # Can be "never", "always", "unhandled", "userUnhandled"
}
],
},
)
session.start_debugging()

with run(session, target(code_to_debug)):
session.request(
"setExceptionBreakpoints",
{
"filters": [], # Unused when exceptionOptions is passed.
"exceptionOptions": [
{
"path": path,
"breakMode": break_mode, # Can be "never", "always", "unhandled", "userUnhandled"
}
],
},
)

for expected_exception in expect_exceptions:
session.wait_for_stop(

@@ -292,7 +285,7 @@ def test_raise_exception_options(pyfile, start_method, run_as, exceptions, break
@pytest.mark.parametrize("break_on_system_exit_zero", ["break_on_system_exit_zero", ""])
@pytest.mark.parametrize("django", ["django", ""])
def test_success_exitcodes(
pyfile, start_method, run_as, exit_code, break_on_system_exit_zero, django
pyfile, target, run, exit_code, break_on_system_exit_zero, django
):
@pyfile
def code_to_debug():

@@ -303,17 +296,13 @@ def test_success_exitcodes(
print("sys.exit(%r)" % (exit_code,))
sys.exit(exit_code)

with debug.Session(start_method) as session:
with debug.Session() as session:
session.expected_exit_code = some.int
session.configure(
run_as,
code_to_debug,
args=[repr(exit_code)],
breakOnSystemExitZero=bool(break_on_system_exit_zero),
django=bool(django),
)
session.request("setExceptionBreakpoints", {"filters": ["uncaught"]})
session.start_debugging()
session.config["args"] = [repr(exit_code)]
session.config["breakOnSystemExitZero"] = bool(break_on_system_exit_zero)
session.config["django"] = bool(django)
with run(session, target(code_to_debug)):
session.request("setExceptionBreakpoints", {"filters": ["uncaught"]})

if break_on_system_exit_zero or (not django and exit_code == 3):
# If "breakOnSystemExitZero" was specified, we should always break.

@@ -325,7 +314,7 @@ def test_success_exitcodes(


@pytest.mark.parametrize("max_frames", ["default", "all", 10])
def test_exception_stack(pyfile, start_method, run_as, max_frames):
def test_exception_stack(pyfile, target, run, max_frames):
@pyfile
def code_to_debug():
import debug_me # noqa

@@ -356,11 +345,11 @@ def test_exception_stack(pyfile, start_method, run_as, max_frames):
max_expected_lines = 21
maxFrames = 10

with debug.Session(start_method) as session:
with debug.Session() as session:
session.expected_exit_code = some.int
session.configure(run_as, code_to_debug, maxExceptionStackFrames=maxFrames)
session.request("setExceptionBreakpoints", {"filters": ["uncaught"]})
session.start_debugging()
session.config["maxExceptionStackFrames"] = maxFrames
with run(session, target(code_to_debug)):
session.request("setExceptionBreakpoints", {"filters": ["uncaught"]})

stop = session.wait_for_stop(
"exception",

@@ -13,7 +13,7 @@ from tests.patterns import some
@pytest.mark.parametrize("scenario", ["exclude_by_name", "exclude_by_dir"])
@pytest.mark.parametrize("exc_type", ["RuntimeError", "SystemExit"])
def test_exceptions_and_exclude_rules(
pyfile, start_method, run_as, scenario, exc_type
pyfile, target, run, scenario, exc_type
):
if exc_type == "RuntimeError":

@@ -43,18 +43,18 @@ def test_exceptions_and_exclude_rules(
pytest.fail(scenario)
log.info("Rules: {0!j}", rules)

with debug.Session(start_method) as session:
session.configure(run_as, code_to_debug, rules=rules)
session.request(
"setExceptionBreakpoints", {"filters": ["raised", "uncaught"]}
)
session.start_debugging()
with debug.Session() as session:
session.config["rules"] = rules
with run(session, target(code_to_debug)):
session.request(
"setExceptionBreakpoints", {"filters": ["raised", "uncaught"]}
)

# No exceptions should be seen.


@pytest.mark.parametrize("scenario", ["exclude_code_to_debug", "exclude_callback_dir"])
def test_exceptions_and_partial_exclude_rules(pyfile, start_method, run_as, scenario):
def test_exceptions_and_partial_exclude_rules(pyfile, target, run, scenario):
@pyfile
def code_to_debug():
from debug_me import backchannel

@@ -83,13 +83,13 @@ def test_exceptions_and_partial_exclude_rules(pyfile, start_method, run_as, scen
pytest.fail(scenario)
log.info("Rules: {0!j}", rules)

with debug.Session(start_method, backchannel=True) as session:
backchannel = session.backchannel
session.configure(run_as, code_to_debug, rules=rules)
session.request(
"setExceptionBreakpoints", {"filters": ["raised", "uncaught"]}
)
session.start_debugging()
with debug.Session() as session:
backchannel = session.open_backchannel()
session.config["rules"] = rules
with run(session, target(code_to_debug)):
session.request(
"setExceptionBreakpoints", {"filters": ["raised", "uncaught"]}
)
backchannel.send(call_me_back_dir)

if scenario == "exclude_code_to_debug":

@@ -10,12 +10,12 @@ import sys

from ptvsd.common import compat
from tests import code, debug, log, net, test_data
from tests.debug import runners
from tests.debug import runners, targets
from tests.patterns import some

pytestmark = pytest.mark.timeout(60)

flask = net.WebServer(net.get_test_server_port(7000, 7100))
flask_server = net.WebServer(net.get_test_server_port(7000, 7100))


class paths:

@@ -29,56 +29,56 @@ class lines:
app_py = code.get_marked_line_numbers(paths.app_py)


def _initialize_session(session, multiprocess=None, exit_code=0):
if multiprocess:
pytest.skip("https://github.com/microsoft/ptvsd/issues/1706")
@pytest.fixture
@pytest.mark.parametrize("run", [runners.launch, runners.attach_by_socket["cli"]])
def start_flask(run):
def start(session, multiprocess=False):
if multiprocess:
pytest.skip("https://github.com/microsoft/ptvsd/issues/1706")

env = {
"FLASK_APP": paths.app_py,
"FLASK_ENV": "development",
"FLASK_DEBUG": "1" if multiprocess else "0",
}
if platform.system() != "Windows":
locale = "en_US.utf8" if platform.system() == "Linux" else "en_US.UTF-8"
env.update({"LC_ALL": locale, "LANG": locale})
# No clean way to kill Flask server, expect non-zero exit code
session.expected_exit_code = some.int

args = ["run"]
if not multiprocess:
args += ["--no-debugger", "--no-reload", "--with-threads"]
args += ["--port", str(flask.port)]
session.config.env.update({
"FLASK_APP": paths.app_py,
"FLASK_ENV": "development",
"FLASK_DEBUG": "1" if multiprocess else "0",
})
if platform.system() != "Windows":
locale = "en_US.utf8" if platform.system() == "Linux" else "en_US.UTF-8"
session.config.env.update({"LC_ALL": locale, "LANG": locale})

session.expected_exit_code = exit_code
session.configure(
"module",
"flask",
cwd=paths.flask1,
jinja=True,
subProcess=multiprocess,
args=args,
env=env,
)
session.config.update({
"jinja": True,
"subProcess": bool(multiprocess),
})

args = ["run"]
if not multiprocess:
args += ["--no-debugger", "--no-reload", "--with-threads"]
args += ["--port", str(flask_server.port)]



return run(session, targets.Module("flask", args), cwd=paths.flask1)

return start


@pytest.mark.parametrize(
"start_method", [runners.launch, runners.attach_by_socket["cli"]]
)
@pytest.mark.parametrize("bp_target", ["code", "template"])
def test_flask_breakpoint_no_multiproc(start_method, bp_target):
def test_flask_breakpoint_no_multiproc(start_flask, bp_target):
bp_file, bp_line, bp_name = {
"code": (paths.app_py, lines.app_py["bphome"], "home"),
"template": (paths.hello_html, 8, "template"),
}[bp_target]
bp_var_content = compat.force_str("Flask-Jinja-Test")

with debug.Session(start_method) as session:
_initialize_session(
session, exit_code=some.int
) # No clean way to kill Flask server
session.set_breakpoints(bp_file, [bp_line])
session.start_debugging()
with debug.Session() as session:
with start_flask(session):
session.set_breakpoints(bp_file, [bp_line])

with flask:
home_request = flask.get("/")
with flask_server:
home_request = flask_server.get("/")
session.wait_for_stop(
"breakpoint",
expected_frames=[

@@ -102,19 +102,13 @@ def test_flask_breakpoint_no_multiproc(start_method, bp_target):
assert bp_var_content in home_request.response_text()


@pytest.mark.parametrize(
"start_method", [runners.launch, runners.attach_by_socket["cli"]]
)
def test_flask_template_exception_no_multiproc(start_method):
with debug.Session(start_method) as session:
_initialize_session(
session, exit_code=some.int
) # No clean way to kill Flask server
session.request("setExceptionBreakpoints", {"filters": ["raised", "uncaught"]})
session.start_debugging()
def test_flask_template_exception_no_multiproc(start_flask):
with debug.Session() as session:
with start_flask(session):
session.request("setExceptionBreakpoints", {"filters": ["raised", "uncaught"]})

with flask:
flask.get("/badtemplate")
with flask_server:
flask_server.get("/badtemplate")
stop = session.wait_for_stop(
"exception",
expected_frames=[

@@ -159,22 +153,16 @@ def test_flask_template_exception_no_multiproc(start_method):
session.request_continue()


@pytest.mark.parametrize(
"start_method", [runners.launch, runners.attach_by_socket["cli"]]
)
@pytest.mark.parametrize("exc_type", ["handled", "unhandled"])
def test_flask_exception_no_multiproc(start_method, exc_type):
def test_flask_exception_no_multiproc(start_flask, exc_type):
exc_line = lines.app_py["exc_" + exc_type]

with debug.Session(start_method) as session:
_initialize_session(
session, exit_code=some.int
) # No clean way to kill Flask server
session.request("setExceptionBreakpoints", {"filters": ["raised", "uncaught"]})
session.start_debugging()
with debug.Session() as session:
with start_flask(session):
session.request("setExceptionBreakpoints", {"filters": ["raised", "uncaught"]})

with flask:
flask.get("/" + exc_type)
with flask_server:
flask_server.get("/" + exc_type)
stopped = session.wait_for_stop(
"exception",
expected_frames=[

@@ -213,22 +201,24 @@ def test_flask_exception_no_multiproc(start_method, exc_type):
session.request_continue()


def test_flask_breakpoint_multiproc():
def test_flask_breakpoint_multiproc(start_flask):
bp_line = lines.app_py["bphome"]
bp_var_content = compat.force_str("Flask-Jinja-Test")

with debug.Session(runners.launch) as parent_session:
# No clean way to kill Flask server
_initialize_session(parent_session, multiprocess=True, exit_code=some.int)
parent_session.set_breakpoints(paths.app_py, [bp_line])
parent_session.start_debugging()
with debug.Session() as parent_session:
with start_flask(parent_session, multiprocess=True):
parent_session.set_breakpoints(paths.app_py, [bp_line])

with parent_session.attach_to_next_subprocess() as child_session:
child_session.set_breakpoints(paths.app_py, [bp_line])
child_session.start_debugging()
child_pid = parent_session.wait_for_next_subprocess()
with debug.Session() as child_session:
# TODO: this is wrong, but we don't have multiproc attach
# yet, so update this when that is done
# https://github.com/microsoft/ptvsd/issues/1776
with child_session.attach_by_pid(child_pid):
child_session.set_breakpoints(paths.app_py, [bp_line])

with flask:
home_request = flask.get("/")
with flask_server:
home_request = flask_server.get("/")
child_session.wait_for_stop(
"breakpoint",
expected_frames=[

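In the Flask hunks, the old `_initialize_session` helper and the per-test `start_method` parametrization are folded into a single `start_flask` fixture, itself parametrized over `runners.launch` and `runners.attach_by_socket["cli"]`; a test just enters `start_flask(session)` as a context manager. A hedged sketch of the consuming side, using only names that appear in the hunks above (the test name is illustrative, and the frame and response assertions of the real tests are omitted):

    def test_flask_example(start_flask):  # hypothetical name, for illustration
        with debug.Session() as session:
            # start_flask() configures the session for the Flask app and starts
            # it under whichever runner this fixture instance was given; requests
            # that must precede resumption go inside the with-block.
            with start_flask(session):
                session.set_breakpoints(paths.app_py, [lines.app_py["bphome"]])

            with flask_server:
                flask_server.get("/")
                session.wait_for_stop("breakpoint")
                session.request_continue()
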
@@ -11,18 +11,18 @@ from tests.patterns import some


@pytest.mark.parametrize("jmc", ["jmc", ""])
def test_justmycode_frames(pyfile, start_method, run_as, jmc):
def test_justmycode_frames(pyfile, target, run, jmc):
@pyfile
def code_to_debug():
import debug_me # noqa

print("break here") # @bp

with debug.Session(start_method) as session:
session.configure(run_as, code_to_debug, justMyCode=bool(jmc))
session.set_breakpoints(code_to_debug, all)
with debug.Session() as session:
session.config["justMyCode"] = bool(jmc)
with run(session, target(code_to_debug)):
session.set_breakpoints(code_to_debug, all)

session.start_debugging()
stop = session.wait_for_stop(
"breakpoint",
expected_frames=[

@@ -15,17 +15,16 @@ from tests import debug
# sequentially, by the time we get to "stopped", we also have all the output events.


def test_with_no_output(pyfile, start_method, run_as):
def test_with_no_output(pyfile, target, run):
@pyfile
def code_to_debug():
import debug_me # noqa
() # @wait_for_output

with debug.Session(start_method) as session:
session.configure(run_as, code_to_debug)
session.set_breakpoints(code_to_debug, all)
with debug.Session() as session:
with run(session, target(code_to_debug)):
session.set_breakpoints(code_to_debug, all)

session.start_debugging()
session.wait_for_stop("breakpoint")
session.request_continue()


@@ -35,7 +34,7 @@ def test_with_no_output(pyfile, start_method, run_as):
assert not session.captured_stderr()


def test_with_tab_in_output(pyfile, start_method, run_as):
def test_with_tab_in_output(pyfile, target, run):
@pyfile
def code_to_debug():
import debug_me # noqa

@@ -44,11 +43,9 @@ def test_with_tab_in_output(pyfile, start_method, run_as):
print(a)
() # @wait_for_output

with debug.Session(start_method) as session:
session.configure(run_as, code_to_debug)

session.set_breakpoints(code_to_debug, all)
session.start_debugging()
with debug.Session() as session:
with run(session, target(code_to_debug)):
session.set_breakpoints(code_to_debug, all)
session.wait_for_stop()
session.request_continue()


@@ -56,7 +53,7 @@ def test_with_tab_in_output(pyfile, start_method, run_as):


@pytest.mark.parametrize("redirect", ["enabled", "disabled"])
def test_redirect_output(pyfile, start_method, run_as, redirect):
def test_redirect_output(pyfile, target, run, redirect):
@pyfile
def code_to_debug():
import debug_me # noqa

@@ -66,11 +63,10 @@ def test_redirect_output(pyfile, start_method, run_as, redirect):

() # @wait_for_output

with debug.Session(start_method) as session:
session.configure(run_as, code_to_debug, redirectOutput=(redirect == "enabled"))
session.set_breakpoints(code_to_debug, all)
session.start_debugging()

with debug.Session() as session:
session.config["redirectOutput"] = (redirect == "enabled")
with run(session, target(code_to_debug)):
session.set_breakpoints(code_to_debug, all)
session.wait_for_stop()
session.request_continue()


@@ -17,6 +17,9 @@ from tests.patterns import some
def test_client_ide_from_path_mapping_linux_backend(
pyfile, start_method, run_as, os_type
):
pytest.skip()
# This test needs to be redone after debug_options is removed
# TODO: https://github.com/microsoft/ptvsd/issues/1770
"""
Test simulating that the backend is on Linux and the client is on Windows
(automatically detect it from the path mapping).

@@ -64,7 +67,7 @@ def test_client_ide_from_path_mapping_linux_backend(
session.request_continue()


def test_with_dot_remote_root(pyfile, long_tmpdir, start_method, run_as):
def test_with_dot_remote_root(pyfile, long_tmpdir, target, run):
@pyfile
def code_to_debug():
from debug_me import backchannel

@@ -73,24 +76,24 @@ def test_with_dot_remote_root(pyfile, long_tmpdir, start_method, run_as):
backchannel.send(os.path.abspath(__file__))
print("done") # @bp

path_local = long_tmpdir.mkdir("local") / "code_to_debug.py"
path_remote = long_tmpdir.mkdir("remote") / "code_to_debug.py"
dir_local = long_tmpdir.mkdir("local")
dir_remote = long_tmpdir.mkdir("remote")

dir_local = path_local.dirname
dir_remote = path_remote.dirname
path_local = dir_local / "code_to_debug.py"
path_remote = dir_remote / "code_to_debug.py"

code_to_debug.copy(path_local)
code_to_debug.copy(path_remote)

with debug.Session(start_method, backchannel=True) as session:
backchannel = session.backchannel
session.configure(
run_as, path_remote,
cwd=dir_remote,
pathMappings=[{"localRoot": dir_local, "remoteRoot": "."}],
)
session.set_breakpoints(path_local, all)
session.start_debugging()
with debug.Session() as session:
backchannel = session.open_backchannel()
session.config["pathMappings"] = [{"localRoot": dir_local, "remoteRoot": "."}]

# Run using remote path
with run(session, target(path_remote), cwd=dir_remote):
# Set breakpoints using local path. This ensures that
# local paths are mapped to remote paths.
session.set_breakpoints(path_local, all)

actual_path_remote = backchannel.receive()
assert some.path(actual_path_remote) == path_remote

@@ -108,7 +111,7 @@ def test_with_dot_remote_root(pyfile, long_tmpdir, start_method, run_as):
session.request_continue()


def test_with_path_mappings(pyfile, long_tmpdir, start_method, run_as):
def test_with_path_mappings(pyfile, long_tmpdir, target, run):
@pyfile
def code_to_debug():
from debug_me import backchannel

@@ -139,14 +142,15 @@ def test_with_path_mappings(pyfile, long_tmpdir, start_method, run_as):
call_me_back_dir = test_data / "call_me_back"
call_me_back_py = call_me_back_dir / "call_me_back.py"

with debug.Session(start_method, backchannel=True) as session:
backchannel = session.backchannel
session.configure(
run_as, path_remote,
pathMappings=[{"localRoot": dir_local, "remoteRoot": dir_remote}],
)
session.set_breakpoints(path_local, ["bp"])
session.start_debugging()
with debug.Session() as session:
backchannel = session.open_backchannel()
session.config["pathMappings"] = [{"localRoot": dir_local, "remoteRoot": dir_remote}]

# Run using remote path
with run(session, target(path_remote)):
# Set breakpoints using local path. This ensures that
# local paths are mapped to remote paths.
session.set_breakpoints(path_local, ["bp"])

actual_path_remote = backchannel.receive()
assert some.path(actual_path_remote) == path_remote

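The two path-mapping tests keep their original structure: the same script is copied into a "local" and a "remote" directory, the debuggee runs the remote copy, and breakpoints are set against the local copy, so they only bind if `pathMappings` translates local paths to remote ones. A condensed sketch of that arrangement under the new API, assuming the suite's `pyfile`, `long_tmpdir`, `target`, and `run` fixtures plus the module-level `debug` and `some` imports shown above (the test name is illustrative, and the stop/frame checks of the real tests are elided):

    def test_path_mapping_example(pyfile, long_tmpdir, target, run):  # hypothetical name
        @pyfile
        def code_to_debug():
            import os
            from debug_me import backchannel

            backchannel.send(os.path.abspath(__file__))
            print("done")  # @bp

        # Two copies of the same script: one "local", one "remote".
        dir_local = long_tmpdir.mkdir("local")
        dir_remote = long_tmpdir.mkdir("remote")
        path_local = dir_local / "code_to_debug.py"
        path_remote = dir_remote / "code_to_debug.py"
        code_to_debug.copy(path_local)
        code_to_debug.copy(path_remote)

        with debug.Session() as session:
            backchannel = session.open_backchannel()
            session.config["pathMappings"] = [
                {"localRoot": dir_local, "remoteRoot": dir_remote}
            ]

            # Run using the remote path...
            with run(session, target(path_remote)):
                # ...but set breakpoints using the local path; they only bind
                # if local paths are mapped to remote paths.
                session.set_breakpoints(path_local, all)

            # The debuggee reports the path it actually ran under.
            actual_path_remote = backchannel.receive()
            assert some.path(actual_path_remote) == path_remote
            # ... wait for the breakpoint and check frames as in the hunks above ...
            session.request_continue()
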
@@ -8,19 +8,19 @@ from tests import debug
from tests.patterns import some


def test_set_expression(pyfile, start_method, run_as):
def test_set_expression(pyfile, target, run):
@pyfile
def code_to_debug():
from debug_me import backchannel, ptvsd
from debug_me import backchannel

a = 1
ptvsd.break_into_debugger()
backchannel.send(a)
backchannel.send(a) # @bp

with debug.Session() as session:
backchannel = session.open_backchannel()
with run(session, target(code_to_debug)):
session.set_breakpoints(code_to_debug, all)

with debug.Session(start_method, backchannel=True) as session:
backchannel = session.backchannel
session.configure(run_as, code_to_debug)
session.start_debugging()
hit = session.wait_for_stop()

resp_scopes = session.send_request(

@@ -10,7 +10,7 @@ from tests import debug
from tests.patterns import some


def test_with_path_mappings(pyfile, tmpdir, start_method, run_as):
def test_with_path_mappings(pyfile, tmpdir, target, run):
@pyfile
def code_to_debug():
import debug_me # noqa

@@ -59,9 +59,7 @@ def test_with_path_mappings(pyfile, tmpdir, start_method, run_as):
exec(code["cell2"], {})
print("ok")

with debug.Session(start_method) as session:
session.configure(run_as, code_to_debug)

with debug.Session() as session:
map_to_cell_1_line2 = code_to_debug.lines["map_to_cell1_line_2"]
map_to_cell_2_line2 = code_to_debug.lines["map_to_cell2_line_2"]

@@ -71,31 +69,31 @@ def test_with_path_mappings(pyfile, tmpdir, start_method, run_as):
source_entry = source_entry[0].lower() + source_entry[1:].upper()
source_entry = source_entry.replace("\\", "/")

# Set breakpoints first and the map afterwards to make sure that it's reapplied.
session.set_breakpoints(code_to_debug, [map_to_cell_1_line2])
with run(session, target(code_to_debug)):
# Set breakpoints first and the map afterwards to make sure that it's reapplied.
session.set_breakpoints(code_to_debug, [map_to_cell_1_line2])

session.request(
"setPydevdSourceMap",
{
"source": {"path": source_entry},
"pydevdSourceMaps": [
{
"line": map_to_cell_1_line2,
"endLine": map_to_cell_1_line2 + 1,
"runtimeSource": {"path": "<cell1>"},
"runtimeLine": 2,
},
{
"line": map_to_cell_2_line2,
"endLine": map_to_cell_2_line2 + 1,
"runtimeSource": {"path": "<cell2>"},
"runtimeLine": 2,
},
],
},
)
session.request(
"setPydevdSourceMap",
{
"source": {"path": source_entry},
"pydevdSourceMaps": [
{
"line": map_to_cell_1_line2,
"endLine": map_to_cell_1_line2 + 1,
"runtimeSource": {"path": "<cell1>"},
"runtimeLine": 2,
},
{
"line": map_to_cell_2_line2,
"endLine": map_to_cell_2_line2 + 1,
"runtimeSource": {"path": "<cell2>"},
"runtimeLine": 2,
},
],
},
)

session.start_debugging()
session.wait_for_stop(
"breakpoint",
expected_frames=[some.dap.frame(code_to_debug, line=map_to_cell_1_line2)],

@@ -118,7 +116,7 @@ def test_with_path_mappings(pyfile, tmpdir, start_method, run_as):
},
)

session.request("continue")
session.request_continue()
session.wait_for_stop(
"breakpoint",
expected_frames=[some.dap.frame(code_to_debug, line=map_to_cell_2_line2)],