Mirror of https://github.com/microsoft/debugpy.git (synced 2025-12-23 08:48:12 +00:00)
Test refactoring migrated to new api (#1544)

* Update ANY to some
* Refactor breakpoint lines in tests
* Replace old import and enable debug with debug_me
* Remove unused code
* Fix some linter issues
* Change from some.such_that to some.str.such_that
* Linting with black

commit 8d75d6d819 (parent f71f0a5b0a)

35 changed files with 2554 additions and 2234 deletions
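A minimal before/after sketch of the idioms this commit migrates, condensed from the hunks below (not a complete test from the repository):

    # Before: hard-coded breakpoint line numbers and ANY-based pattern matching.
    bp_line = 5
    session.set_breakpoints(code_to_debug, [bp_line])
    session.wait_for_next(Event('output', ANY.dict_with({'category': 'stdout'})))

    # After: breakpoint lines come from "# @break"-style markers inside the debugged
    # code, assertions use the some.* pattern helpers, the debuggee imports debug_me,
    # and everything is reformatted with black (double quotes).
    session.set_breakpoints(code_to_debug, [code_to_debug.lines["break"]])
    session.wait_for_next(
        Event("output", some.dict.containing({"category": "stdout"}))
    )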
@@ -14,6 +14,5 @@ exclude = '''
    ^/src/ptvsd/_version.py
    ^/src/ptvsd/common
    ^/src/ptvsd/server
    ^/tests
)
'''
@@ -465,56 +465,6 @@ class VariablesSorter(object):
        return self.variables + self.single_underscore + self.double_underscore + self.dunder # noqa


class InternalsFilter(object):
    """Identifies debugger internal artifacts.
    """
    # TODO: Move the internal thread identifier here

    def __init__(self):
        if platform.system() == 'Windows':
            self._init_windows()
        else:
            self._init_default()

    def _init_default(self):
        self._ignore_files = [
            '/ptvsd_launcher.py',
        ]

        self._ignore_path_prefixes = [
            os.path.dirname(os.path.abspath(ptvsd.__file__)),
        ]

    def _init_windows(self):
        self._init_default()
        files = []
        for f in self._ignore_files:
            files.append(f.lower())
        self._ignore_files = files

        prefixes = []
        for p in self._ignore_path_prefixes:
            prefixes.append(p.lower())
        self._ignore_path_prefixes = prefixes

    def is_internal_path(self, abs_file_path):
        # TODO: Remove replace('\\', '/') after the path mapping in pydevd
        # is fixed. Currently if the client is windows and server is linux
        # the path separators used are windows path separators for linux
        # source paths.
        is_windows = platform.system() == 'Windows'

        file_path = abs_file_path.lower() if is_windows else abs_file_path
        file_path = file_path.replace('\\', '/')
        for f in self._ignore_files:
            if file_path.endswith(f):
                return True
        for prefix in self._ignore_path_prefixes:
            prefix_path = prefix.replace('\\', '/')
            if file_path.startswith(prefix_path):
                return True
        return False


########################
# the debug config
@@ -1039,7 +989,6 @@ class VSCodeMessageProcessor(VSCLifecycleMsgProcessor):

        # debugger state
        self._success_exitcodes = []
        self.internals_filter = InternalsFilter()

        # adapter state
        self._detached = False
@@ -45,11 +45,6 @@ class Some(object):
        """
        return Either(self, pattern)

    def such_that(self, condition):
        """Same pattern, but it only matches if condition() is true.
        """
        return SuchThat(self, condition)

    def in_range(self, start, stop):
        """Same pattern, but it only matches if the start <= value < stop.
        """
@@ -55,19 +55,19 @@ class JsonMemoryStream(object):


class TestJsonIOStream(object):
    MESSAGE_BODY_TEMPLATE = u'{"arguments": {"threadId": 3}, "command": "next", "seq": %d, "type": "request"}'
    MESSAGE_BODY_TEMPLATE = '{"arguments": {"threadId": 3}, "command": "next", "seq": %d, "type": "request"}'
    MESSAGES = []
    SERIALIZED_MESSAGES = b''
    SERIALIZED_MESSAGES = b""

    @classmethod
    def setup_class(cls):
        for seq in range(0, 3):
            message_body = cls.MESSAGE_BODY_TEMPLATE % seq
            message = json.loads(message_body)
            message_body = message_body.encode('utf-8')
            message_body = message_body.encode("utf-8")
            cls.MESSAGES.append(message)
            message_header = u'Content-Length: %d\r\n\r\n' % len(message_body)
            cls.SERIALIZED_MESSAGES += message_header.encode('ascii') + message_body
            message_header = "Content-Length: %d\r\n\r\n" % len(message_body)
            cls.SERIALIZED_MESSAGES += message_header.encode("ascii") + message_body

    def test_read(self):
        data = io.BytesIO(self.SERIALIZED_MESSAGES)
@@ -89,8 +89,8 @@ class TestJsonIOStream(object):

class TestJsonMemoryStream(object):
    MESSAGES = [
        {'seq': 1, 'type': 'request', 'command': 'next', 'arguments': {'threadId': 3}},
        {'seq': 2, 'type': 'request', 'command': 'next', 'arguments': {'threadId': 5}},
        {"seq": 1, "type": "request", "command": "next", "arguments": {"threadId": 3}},
        {"seq": 2, "type": "request", "command": "next", "arguments": {"threadId": 5}},
    ]

    def test_read(self):
@@ -116,23 +116,35 @@ class TestJsonMessageChannel(object):
when the returned iterator is exhausted.
|
||||
"""
|
||||
exhausted = threading.Event()
|
||||
|
||||
def iterate():
|
||||
for x in collection:
|
||||
yield x
|
||||
exhausted.set()
|
||||
|
||||
return iterate(), exhausted
|
||||
|
||||
def test_events(self):
|
||||
EVENTS = [
|
||||
{'seq': 1, 'type': 'event', 'event': 'stopped', 'body': {'reason': 'pause'}},
|
||||
{'seq': 2, 'type': 'event', 'event': 'unknown', 'body': {'something': 'else'}},
|
||||
{
|
||||
"seq": 1,
|
||||
"type": "event",
|
||||
"event": "stopped",
|
||||
"body": {"reason": "pause"},
|
||||
},
|
||||
{
|
||||
"seq": 2,
|
||||
"type": "event",
|
||||
"event": "unknown",
|
||||
"body": {"something": "else"},
|
||||
},
|
||||
]
|
||||
|
||||
events_received = []
|
||||
|
||||
class Handlers(object):
|
||||
def stopped_event(self, event):
|
||||
assert event.event == 'stopped'
|
||||
assert event.event == "stopped"
|
||||
events_received.append((event.channel, event.body))
|
||||
|
||||
def event(self, event):
@@ -145,33 +157,50 @@ class TestJsonMessageChannel(object):
input_exhausted.wait()
|
||||
|
||||
assert events_received == [
|
||||
(channel, EVENTS[0]['body']),
|
||||
(channel, 'unknown', EVENTS[1]['body']),
|
||||
(channel, EVENTS[0]["body"]),
|
||||
(channel, "unknown", EVENTS[1]["body"]),
|
||||
]
|
||||
|
||||
def test_requests(self):
|
||||
REQUESTS = [
|
||||
{'seq': 1, 'type': 'request', 'command': 'next', 'arguments': {'threadId': 3}},
|
||||
{'seq': 2, 'type': 'request', 'command': 'unknown', 'arguments': {'answer': 42}},
|
||||
{'seq': 3, 'type': 'request', 'command': 'pause', 'arguments': {'threadId': 5}},
|
||||
{
|
||||
"seq": 1,
|
||||
"type": "request",
|
||||
"command": "next",
|
||||
"arguments": {"threadId": 3},
|
||||
},
|
||||
{
|
||||
"seq": 2,
|
||||
"type": "request",
|
||||
"command": "unknown",
|
||||
"arguments": {"answer": 42},
|
||||
},
|
||||
{
|
||||
"seq": 3,
|
||||
"type": "request",
|
||||
"command": "pause",
|
||||
"arguments": {"threadId": 5},
|
||||
},
|
||||
]
|
||||
|
||||
requests_received = []
|
||||
|
||||
class Handlers(object):
|
||||
def next_request(self, request):
|
||||
assert request.command == 'next'
|
||||
assert request.command == "next"
|
||||
requests_received.append((request.channel, request.arguments))
|
||||
return {'threadId': 7}
|
||||
return {"threadId": 7}
|
||||
|
||||
def request(self, request):
|
||||
requests_received.append((request.channel, request.command, request.arguments))
|
||||
requests_received.append(
|
||||
(request.channel, request.command, request.arguments)
|
||||
)
|
||||
return {}
|
||||
|
||||
def pause_request(self, request):
|
||||
assert request.command == 'pause'
|
||||
assert request.command == "pause"
|
||||
requests_received.append((request.channel, request.arguments))
|
||||
request.cant_handle('pause error')
|
||||
request.cant_handle("pause error")
|
||||
|
||||
input, input_exhausted = self.iter_with_event(REQUESTS)
|
||||
output = []
@@ -181,15 +210,35 @@ class TestJsonMessageChannel(object):
input_exhausted.wait()
|
||||
|
||||
assert requests_received == [
|
||||
(channel, REQUESTS[0]['arguments']),
|
||||
(channel, 'unknown', REQUESTS[1]['arguments']),
|
||||
(channel, REQUESTS[2]['arguments']),
|
||||
(channel, REQUESTS[0]["arguments"]),
|
||||
(channel, "unknown", REQUESTS[1]["arguments"]),
|
||||
(channel, REQUESTS[2]["arguments"]),
|
||||
]
|
||||
|
||||
assert output == [
|
||||
{'seq': 1, 'type': 'response', 'request_seq': 1, 'command': 'next', 'success': True, 'body': {'threadId': 7}},
|
||||
{'seq': 2, 'type': 'response', 'request_seq': 2, 'command': 'unknown', 'success': True},
|
||||
{'seq': 3, 'type': 'response', 'request_seq': 3, 'command': 'pause', 'success': False, 'message': 'pause error'},
|
||||
{
|
||||
"seq": 1,
|
||||
"type": "response",
|
||||
"request_seq": 1,
|
||||
"command": "next",
|
||||
"success": True,
|
||||
"body": {"threadId": 7},
|
||||
},
|
||||
{
|
||||
"seq": 2,
|
||||
"type": "response",
|
||||
"request_seq": 2,
|
||||
"command": "unknown",
|
||||
"success": True,
|
||||
},
|
||||
{
|
||||
"seq": 3,
|
||||
"type": "response",
|
||||
"request_seq": 3,
|
||||
"command": "pause",
|
||||
"success": False,
|
||||
"message": "pause error",
|
||||
},
|
||||
]
|
||||
|
||||
def test_responses(self):
@@ -200,20 +249,32 @@ class TestJsonMessageChannel(object):
def iter_responses():
|
||||
request1_sent.wait()
|
||||
yield {
|
||||
'seq': 1, 'type': 'response', 'request_seq': 1, 'command': 'next',
|
||||
'success': True, 'body': {'threadId': 3},
|
||||
"seq": 1,
|
||||
"type": "response",
|
||||
"request_seq": 1,
|
||||
"command": "next",
|
||||
"success": True,
|
||||
"body": {"threadId": 3},
|
||||
}
|
||||
|
||||
request2_sent.wait()
|
||||
yield {
|
||||
'seq': 2, 'type': 'response', 'request_seq': 2, 'command': 'pause',
|
||||
'success': False, 'message': 'Invalid message: pause not supported',
|
||||
"seq": 2,
|
||||
"type": "response",
|
||||
"request_seq": 2,
|
||||
"command": "pause",
|
||||
"success": False,
|
||||
"message": "Invalid message: pause not supported",
|
||||
}
|
||||
|
||||
request3_sent.wait()
|
||||
yield {
|
||||
'seq': 3, 'type': 'response', 'request_seq': 3, 'command': 'next',
|
||||
'success': True, 'body': {'threadId': 5},
|
||||
"seq": 3,
|
||||
"type": "response",
|
||||
"request_seq": 3,
|
||||
"command": "next",
|
||||
"success": True,
|
||||
"body": {"threadId": 5},
|
||||
}
|
||||
|
||||
stream = JsonMemoryStream(iter_responses(), [])
@@ -221,7 +282,7 @@ class TestJsonMessageChannel(object):
channel.start()
|
||||
|
||||
# Blocking wait.
|
||||
request1 = channel.send_request('next')
|
||||
request1 = channel.send_request("next")
|
||||
request1_sent.set()
|
||||
response1_body = request1.wait_for_response()
|
||||
response1 = request1.response
@@ -229,15 +290,17 @@ class TestJsonMessageChannel(object):
assert response1.success
|
||||
assert response1.request is request1
|
||||
assert response1.body == response1_body
|
||||
assert response1.body == {'threadId': 3}
|
||||
assert response1.body == {"threadId": 3}
|
||||
|
||||
# Async callback, registered before response is received.
|
||||
request2 = channel.send_request('pause')
|
||||
request2 = channel.send_request("pause")
|
||||
response2 = []
|
||||
response2_received = threading.Event()
|
||||
|
||||
def response2_handler(resp):
|
||||
response2.append(resp)
|
||||
response2_received.set()
|
||||
|
||||
request2.on_response(response2_handler)
|
||||
request2_sent.set()
|
||||
response2_received.wait()
@@ -246,17 +309,21 @@ class TestJsonMessageChannel(object):
assert not response2.success
|
||||
assert response2.request is request2
|
||||
assert response2 is request2.response
|
||||
assert response2.body == messaging.InvalidMessageError('pause not supported', request2)
|
||||
assert response2.body == messaging.InvalidMessageError(
|
||||
"pause not supported", request2
|
||||
)
|
||||
|
||||
# Async callback, registered after response is received.
|
||||
request3 = channel.send_request('next')
|
||||
request3 = channel.send_request("next")
|
||||
request3_sent.set()
|
||||
request3.wait_for_response()
|
||||
response3 = []
|
||||
response3_received = threading.Event()
|
||||
|
||||
def response3_handler(resp):
|
||||
response3.append(resp)
|
||||
response3_received.set()
|
||||
|
||||
request3.on_response(response3_handler)
|
||||
response3_received.wait()
|
||||
response3, = response3
@@ -264,149 +331,175 @@ class TestJsonMessageChannel(object):
assert response3.success
|
||||
assert response3.request is request3
|
||||
assert response3 is request3.response
|
||||
assert response3.body == {'threadId': 5}
|
||||
assert response3.body == {"threadId": 5}
|
||||
|
||||
def test_yield(self):
|
||||
REQUESTS = [
|
||||
{'seq': 10, 'type': 'request', 'command': 'launch', 'arguments': {'noDebug': False}},
|
||||
{'seq': 20, 'type': 'request', 'command': 'setBreakpoints', 'arguments': {'main.py': 1}},
|
||||
{'seq': 30, 'type': 'event', 'event': 'expected'},
|
||||
{'seq': 40, 'type': 'request', 'command': 'launch', 'arguments': {'noDebug': True}}, # test re-entrancy
|
||||
{'seq': 50, 'type': 'request', 'command': 'setBreakpoints', 'arguments': {'main.py': 2}},
|
||||
{'seq': 60, 'type': 'event', 'event': 'unexpected'},
|
||||
{'seq': 80, 'type': 'request', 'command': 'configurationDone'},
|
||||
{'seq': 90, 'type': 'request', 'command': 'launch'}, # test handler yielding empty body
|
||||
{
|
||||
"seq": 10,
|
||||
"type": "request",
|
||||
"command": "launch",
|
||||
"arguments": {"noDebug": False},
|
||||
},
|
||||
{
|
||||
"seq": 20,
|
||||
"type": "request",
|
||||
"command": "setBreakpoints",
|
||||
"arguments": {"main.py": 1},
|
||||
},
|
||||
{"seq": 30, "type": "event", "event": "expected"},
|
||||
{
|
||||
"seq": 40,
|
||||
"type": "request",
|
||||
"command": "launch",
|
||||
"arguments": {"noDebug": True},
|
||||
}, # test re-entrancy
|
||||
{
|
||||
"seq": 50,
|
||||
"type": "request",
|
||||
"command": "setBreakpoints",
|
||||
"arguments": {"main.py": 2},
|
||||
},
|
||||
{"seq": 60, "type": "event", "event": "unexpected"},
|
||||
{"seq": 80, "type": "request", "command": "configurationDone"},
|
||||
{
|
||||
"seq": 90,
|
||||
"type": "request",
|
||||
"command": "launch",
|
||||
}, # test handler yielding empty body
|
||||
]
|
||||
|
||||
class Handlers(object):
|
||||
|
||||
received = {
|
||||
'launch': 0,
|
||||
'setBreakpoints': 0,
|
||||
'configurationDone': 0,
|
||||
'expected': 0,
|
||||
'unexpected': 0,
|
||||
"launch": 0,
|
||||
"setBreakpoints": 0,
|
||||
"configurationDone": 0,
|
||||
"expected": 0,
|
||||
"unexpected": 0,
|
||||
}
|
||||
|
||||
def launch_request(self, request):
|
||||
assert request.seq in (10, 40, 90)
|
||||
self.received['launch'] += 1
|
||||
self.received["launch"] += 1
|
||||
|
||||
if request.seq == 10: # launch #1
|
||||
assert self.received == {
|
||||
'launch': 1,
|
||||
'setBreakpoints': 0,
|
||||
'configurationDone': 0,
|
||||
'expected': 0,
|
||||
'unexpected': 0,
|
||||
"launch": 1,
|
||||
"setBreakpoints": 0,
|
||||
"configurationDone": 0,
|
||||
"expected": 0,
|
||||
"unexpected": 0,
|
||||
}
|
||||
|
||||
msg = yield # setBreakpoints #1
|
||||
assert msg.seq == 20
|
||||
assert self.received == {
|
||||
'launch': 1,
|
||||
'setBreakpoints': 1,
|
||||
'configurationDone': 0,
|
||||
'expected': 0,
|
||||
'unexpected': 0,
|
||||
"launch": 1,
|
||||
"setBreakpoints": 1,
|
||||
"configurationDone": 0,
|
||||
"expected": 0,
|
||||
"unexpected": 0,
|
||||
}
|
||||
|
||||
msg = yield # expected
|
||||
assert msg.seq == 30
|
||||
assert self.received == {
|
||||
'launch': 1,
|
||||
'setBreakpoints': 1,
|
||||
'configurationDone': 0,
|
||||
'expected': 1,
|
||||
'unexpected': 0,
|
||||
"launch": 1,
|
||||
"setBreakpoints": 1,
|
||||
"configurationDone": 0,
|
||||
"expected": 1,
|
||||
"unexpected": 0,
|
||||
}
|
||||
|
||||
msg = yield # launch #2 + nested messages
|
||||
assert msg.seq == 40
|
||||
assert self.received == {
|
||||
'launch': 2,
|
||||
'setBreakpoints': 2,
|
||||
'configurationDone': 0,
|
||||
'expected': 1,
|
||||
'unexpected': 1,
|
||||
"launch": 2,
|
||||
"setBreakpoints": 2,
|
||||
"configurationDone": 0,
|
||||
"expected": 1,
|
||||
"unexpected": 1,
|
||||
}
|
||||
|
||||
# We should see that it failed, but no exception bubbling up here.
|
||||
assert not msg.response.success
|
||||
assert msg.response.body == messaging.MessageHandlingError('test failure', msg)
|
||||
assert msg.response.body == messaging.MessageHandlingError(
|
||||
"test failure", msg
|
||||
)
|
||||
|
||||
msg = yield # configurationDone
|
||||
assert msg.seq == 80
|
||||
assert self.received == {
|
||||
'launch': 2,
|
||||
'setBreakpoints': 2,
|
||||
'configurationDone': 1,
|
||||
'expected': 1,
|
||||
'unexpected': 1,
|
||||
"launch": 2,
|
||||
"setBreakpoints": 2,
|
||||
"configurationDone": 1,
|
||||
"expected": 1,
|
||||
"unexpected": 1,
|
||||
}
|
||||
|
||||
yield {'answer': 42}
|
||||
yield {"answer": 42}
|
||||
|
||||
elif request.seq == 40: # launch #1
|
||||
assert self.received == {
|
||||
'launch': 2,
|
||||
'setBreakpoints': 1,
|
||||
'configurationDone': 0,
|
||||
'expected': 1,
|
||||
'unexpected': 0,
|
||||
"launch": 2,
|
||||
"setBreakpoints": 1,
|
||||
"configurationDone": 0,
|
||||
"expected": 1,
|
||||
"unexpected": 0,
|
||||
}
|
||||
|
||||
msg = yield # setBreakpoints #2
|
||||
assert msg.seq == 50
|
||||
assert self.received == {
|
||||
'launch': 2,
|
||||
'setBreakpoints': 2,
|
||||
'configurationDone': 0,
|
||||
'expected': 1,
|
||||
'unexpected': 0,
|
||||
"launch": 2,
|
||||
"setBreakpoints": 2,
|
||||
"configurationDone": 0,
|
||||
"expected": 1,
|
||||
"unexpected": 0,
|
||||
}
|
||||
|
||||
msg = yield # unexpected
|
||||
assert msg.seq == 60
|
||||
assert self.received == {
|
||||
'launch': 2,
|
||||
'setBreakpoints': 2,
|
||||
'configurationDone': 0,
|
||||
'expected': 1,
|
||||
'unexpected': 1,
|
||||
"launch": 2,
|
||||
"setBreakpoints": 2,
|
||||
"configurationDone": 0,
|
||||
"expected": 1,
|
||||
"unexpected": 1,
|
||||
}
|
||||
|
||||
request.cant_handle('test failure')
|
||||
request.cant_handle("test failure")
|
||||
|
||||
elif request.seq == 90: # launch #3
|
||||
assert self.received == {
|
||||
'launch': 3,
|
||||
'setBreakpoints': 2,
|
||||
'configurationDone': 1,
|
||||
'expected': 1,
|
||||
'unexpected': 1,
|
||||
"launch": 3,
|
||||
"setBreakpoints": 2,
|
||||
"configurationDone": 1,
|
||||
"expected": 1,
|
||||
"unexpected": 1,
|
||||
}
|
||||
#yield {}
|
||||
# yield {}
|
||||
|
||||
def setBreakpoints_request(self, request):
|
||||
assert request.seq in (20, 50, 70)
|
||||
self.received['setBreakpoints'] += 1
|
||||
return {'which': self.received['setBreakpoints']}
|
||||
self.received["setBreakpoints"] += 1
|
||||
return {"which": self.received["setBreakpoints"]}
|
||||
|
||||
def request(self, request):
|
||||
assert request.seq == 80
|
||||
assert request.command == 'configurationDone'
|
||||
self.received['configurationDone'] += 1
|
||||
assert request.command == "configurationDone"
|
||||
self.received["configurationDone"] += 1
|
||||
return {}
|
||||
|
||||
def expected_event(self, event):
|
||||
assert event.seq == 30
|
||||
self.received['expected'] += 1
|
||||
self.received["expected"] += 1
|
||||
|
||||
def event(self, event):
|
||||
assert event.seq == 60
|
||||
assert event.event == 'unexpected'
|
||||
self.received['unexpected'] += 1
|
||||
assert event.event == "unexpected"
|
||||
self.received["unexpected"] += 1
|
||||
|
||||
input, input_exhausted = self.iter_with_event(REQUESTS)
|
||||
output = []
@@ -417,40 +510,64 @@ class TestJsonMessageChannel(object):
|
||||
assert output == [
|
||||
{
|
||||
'seq': 1, 'type': 'response', 'request_seq': 20, 'command': 'setBreakpoints',
|
||||
'success': True, 'body': {'which': 1},
|
||||
"seq": 1,
|
||||
"type": "response",
|
||||
"request_seq": 20,
|
||||
"command": "setBreakpoints",
|
||||
"success": True,
|
||||
"body": {"which": 1},
|
||||
},
|
||||
{
|
||||
'seq': 2, 'type': 'response', 'request_seq': 50, 'command': 'setBreakpoints',
|
||||
'success': True, 'body': {'which': 2},
|
||||
"seq": 2,
|
||||
"type": "response",
|
||||
"request_seq": 50,
|
||||
"command": "setBreakpoints",
|
||||
"success": True,
|
||||
"body": {"which": 2},
|
||||
},
|
||||
{
|
||||
'seq': 3, 'type': 'response', 'request_seq': 40, 'command': 'launch',
|
||||
'success': False, 'message': 'test failure',
|
||||
"seq": 3,
|
||||
"type": "response",
|
||||
"request_seq": 40,
|
||||
"command": "launch",
|
||||
"success": False,
|
||||
"message": "test failure",
|
||||
},
|
||||
{
|
||||
'seq': 4, 'type': 'response', 'request_seq': 80, 'command': 'configurationDone',
|
||||
'success': True,
|
||||
"seq": 4,
|
||||
"type": "response",
|
||||
"request_seq": 80,
|
||||
"command": "configurationDone",
|
||||
"success": True,
|
||||
},
|
||||
{
|
||||
'seq': 5, 'type': 'response', 'request_seq': 10, 'command': 'launch',
|
||||
'success': True, 'body': {'answer': 42},
|
||||
"seq": 5,
|
||||
"type": "response",
|
||||
"request_seq": 10,
|
||||
"command": "launch",
|
||||
"success": True,
|
||||
"body": {"answer": 42},
|
||||
},
|
||||
{
|
||||
'seq': 6, 'type': 'response', 'request_seq': 90, 'command': 'launch',
|
||||
'success': True,
|
||||
"seq": 6,
|
||||
"type": "response",
|
||||
"request_seq": 90,
|
||||
"command": "launch",
|
||||
"success": True,
|
||||
},
|
||||
]
|
||||
|
||||
def test_invalid_request_handling(self):
|
||||
REQUESTS = [
|
||||
{
|
||||
'seq': 1, 'type': 'request', 'command': 'stackTrace',
|
||||
'arguments': {"AAA": {}},
|
||||
"seq": 1,
|
||||
"type": "request",
|
||||
"command": "stackTrace",
|
||||
"arguments": {"AAA": {}},
|
||||
},
|
||||
{'seq': 2, 'type': 'request', 'command': 'stackTrace', 'arguments': {}},
|
||||
{'seq': 3, 'type': 'request', 'command': 'unknown', 'arguments': None},
|
||||
{'seq': 4, 'type': 'request', 'command': 'pause'},
|
||||
{"seq": 2, "type": "request", "command": "stackTrace", "arguments": {}},
|
||||
{"seq": 3, "type": "request", "command": "unknown", "arguments": None},
|
||||
{"seq": 4, "type": "request", "command": "pause"},
|
||||
]
|
||||
|
||||
class Handlers(object):
@@ -476,28 +593,39 @@ class TestJsonMessageChannel(object):
|
||||
assert output == [
|
||||
{
|
||||
'seq': 1, 'type': 'response', 'request_seq': 1,
|
||||
'command': 'stackTrace', 'success': False,
|
||||
'message': missing_property("BBB"),
|
||||
"seq": 1,
|
||||
"type": "response",
|
||||
"request_seq": 1,
|
||||
"command": "stackTrace",
|
||||
"success": False,
|
||||
"message": missing_property("BBB"),
|
||||
},
|
||||
{
|
||||
'seq': 2, 'type': 'response', 'request_seq': 2,
|
||||
'command': 'stackTrace', 'success': False,
|
||||
'message': missing_property("AAA"),
|
||||
"seq": 2,
|
||||
"type": "response",
|
||||
"request_seq": 2,
|
||||
"command": "stackTrace",
|
||||
"success": False,
|
||||
"message": missing_property("AAA"),
|
||||
},
|
||||
{
|
||||
'seq': 3, 'type': 'response', 'request_seq': 3,
|
||||
'command': 'unknown', 'success': False,
|
||||
'message': missing_property("CCC"),
|
||||
"seq": 3,
|
||||
"type": "response",
|
||||
"request_seq": 3,
|
||||
"command": "unknown",
|
||||
"success": False,
|
||||
"message": missing_property("CCC"),
|
||||
},
|
||||
{
|
||||
'seq': 4, 'type': 'response', 'request_seq': 4,
|
||||
'command': 'pause', 'success': False,
|
||||
'message': missing_property("DDD"),
|
||||
"seq": 4,
|
||||
"type": "response",
|
||||
"request_seq": 4,
|
||||
"command": "pause",
|
||||
"success": False,
|
||||
"message": missing_property("DDD"),
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
def test_fuzz(self):
|
||||
# Set up two channels over the same stream that send messages to each other
|
||||
# asynchronously, and record everything that they send and receive.
@@ -514,7 +642,10 @@ class TestJsonMessageChannel(object):
self.done = False
|
||||
|
||||
def start(self, channel):
|
||||
self._worker = threading.Thread(name=self.name, target=lambda: self._send_requests_and_events(channel))
|
||||
self._worker = threading.Thread(
|
||||
name=self.name,
|
||||
target=lambda: self._send_requests_and_events(channel),
|
||||
)
|
||||
self._worker.daemon = True
|
||||
self._worker.start()
@@ -526,24 +657,25 @@ class TestJsonMessageChannel(object):
self.done = True
|
||||
|
||||
def fizz_event(self, event):
|
||||
assert event.event == 'fizz'
|
||||
assert event.event == "fizz"
|
||||
with self.lock:
|
||||
self.received.append(('event', 'fizz', event.body))
|
||||
self.received.append(("event", "fizz", event.body))
|
||||
|
||||
def buzz_event(self, event):
|
||||
assert event.event == 'buzz'
|
||||
assert event.event == "buzz"
|
||||
with self.lock:
|
||||
self.received.append(('event', 'buzz', event.body))
|
||||
self.received.append(("event", "buzz", event.body))
|
||||
|
||||
def event(self, event):
|
||||
with self.lock:
|
||||
self.received.append(('event', event.event, event.body))
|
||||
self.received.append(("event", event.event, event.body))
|
||||
|
||||
def make_and_log_response(self, request):
|
||||
x = random.randint(-100, 100)
|
||||
if x < 0:
|
||||
exc_type = (
|
||||
messaging.InvalidMessageError if x % 2
|
||||
messaging.InvalidMessageError
|
||||
if x % 2
|
||||
else messaging.MessageHandlingError
|
||||
)
|
||||
x = exc_type(str(x), request)
@@ -552,39 +684,43 @@ class TestJsonMessageChannel(object):
return x
|
||||
|
||||
def fizz_request(self, request):
|
||||
assert request.command == 'fizz'
|
||||
assert request.command == "fizz"
|
||||
with self.lock:
|
||||
self.received.append(('request', 'fizz', request.arguments))
|
||||
self.received.append(("request", "fizz", request.arguments))
|
||||
return self.make_and_log_response(request)
|
||||
|
||||
def buzz_request(self, request):
|
||||
assert request.command == 'buzz'
|
||||
assert request.command == "buzz"
|
||||
with self.lock:
|
||||
self.received.append(('request', 'buzz', request.arguments))
|
||||
self.received.append(("request", "buzz", request.arguments))
|
||||
return self.make_and_log_response(request)
|
||||
|
||||
def request(self, request):
|
||||
with self.lock:
|
||||
self.received.append(('request', request.command, request.arguments))
|
||||
self.received.append(
|
||||
("request", request.command, request.arguments)
|
||||
)
|
||||
return self.make_and_log_response(request)
|
||||
|
||||
def _got_response(self, response):
|
||||
with self.lock:
|
||||
self.responses_received.append((response.request.seq, response.body))
|
||||
self.responses_received.append(
|
||||
(response.request.seq, response.body)
|
||||
)
|
||||
|
||||
def _send_requests_and_events(self, channel):
|
||||
types = [random.choice(('event', 'request')) for _ in range(0, 100)]
|
||||
types = [random.choice(("event", "request")) for _ in range(0, 100)]
|
||||
|
||||
for typ in types:
|
||||
name = random.choice(('fizz', 'buzz', 'fizzbuzz'))
|
||||
name = random.choice(("fizz", "buzz", "fizzbuzz"))
|
||||
body = random.randint(0, 100)
|
||||
|
||||
with self.lock:
|
||||
self.sent.append((typ, name, body))
|
||||
|
||||
if typ == 'event':
|
||||
if typ == "event":
|
||||
channel.send_event(name, body)
|
||||
elif typ == 'request':
|
||||
elif typ == "request":
|
||||
req = channel.send_request(name, body)
|
||||
req.on_response(self._got_response)
@@ -592,7 +728,9 @@ class TestJsonMessageChannel(object):
|
||||
# Spin until we receive "done", and also get responses to all requests.
|
||||
requests_sent = types.count("request")
|
||||
print(fmt("{0} waiting for {1} responses ...", self.name, requests_sent))
|
||||
print(
|
||||
fmt("{0} waiting for {1} responses ...", self.name, requests_sent)
|
||||
)
|
||||
while True:
|
||||
with self.lock:
|
||||
if self.done:
@@ -600,23 +738,25 @@ class TestJsonMessageChannel(object):
break
|
||||
time.sleep(0.1)
|
||||
|
||||
fuzzer1 = Fuzzer('fuzzer1')
|
||||
fuzzer2 = Fuzzer('fuzzer2')
|
||||
fuzzer1 = Fuzzer("fuzzer1")
|
||||
fuzzer2 = Fuzzer("fuzzer2")
|
||||
|
||||
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
server_socket.bind(('localhost', 0))
|
||||
server_socket.bind(("localhost", 0))
|
||||
_, port = server_socket.getsockname()
|
||||
server_socket.listen(0)
|
||||
|
||||
socket1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
socket1_thread = threading.Thread(target=lambda: socket1.connect(('localhost', port)))
|
||||
socket1_thread = threading.Thread(
|
||||
target=lambda: socket1.connect(("localhost", port))
|
||||
)
|
||||
socket1_thread.start()
|
||||
socket2, _ = server_socket.accept()
|
||||
socket1_thread.join()
|
||||
|
||||
try:
|
||||
io1 = socket1.makefile('rwb', 0)
|
||||
io2 = socket2.makefile('rwb', 0)
|
||||
io1 = socket1.makefile("rwb", 0)
|
||||
io2 = socket2.makefile("rwb", 0)
|
||||
|
||||
stream1 = messaging.JsonIOStream(io1, io1, "socket1")
|
||||
channel1 = messaging.JsonMessageChannel(stream1, fuzzer1)
|
||||
@@ -9,11 +9,11 @@ from ptvsd.common.socket import create_server, close_socket


class TestSocketServerReuse(object):
    HOST1 = '127.0.0.1'
    HOST1 = "127.0.0.1"
    # NOTE: Windows allows loopback range 127/8. Some flavors of Linux support
    # 127/8 range. Mac by default supports only 127/0. Configuring /etc/network/interface
    # for this one test is overkill so use '0.0.0.0' on Mac instead.
    HOST2 = '127.0.0.2' if platform.system() in ['Windows', 'Linux'] else '0.0.0.0'
    HOST2 = "127.0.0.2" if platform.system() in ["Windows", "Linux"] else "0.0.0.0"

    def test_reuse_same_address_port(self):
        # NOTE: This test should ensure that same address port can be used by two
@@ -47,114 +47,114 @@ class TestSocketServerReuse(object):
|
||||
class TestAddress(object):
|
||||
def test_from_raw(self):
|
||||
serverlocal = Address.as_server('localhost', 9876)
|
||||
serverremote = Address.as_server('1.2.3.4', 9876)
|
||||
clientlocal = Address.as_client('localhost', 9876)
|
||||
clientremote = Address.as_client('1.2.3.4', 9876)
|
||||
serverlocal = Address.as_server("localhost", 9876)
|
||||
serverremote = Address.as_server("1.2.3.4", 9876)
|
||||
clientlocal = Address.as_client("localhost", 9876)
|
||||
clientremote = Address.as_client("1.2.3.4", 9876)
|
||||
default = Address(None, 1111)
|
||||
external = Address('', 1111)
|
||||
external = Address("", 1111)
|
||||
values = [
|
||||
(serverlocal, serverlocal),
|
||||
(serverremote, serverremote),
|
||||
(clientlocal, clientlocal),
|
||||
(clientremote, clientremote),
|
||||
(None, default),
|
||||
('', external),
|
||||
("", external),
|
||||
([], default),
|
||||
({}, default),
|
||||
(9876, serverlocal),
|
||||
('localhost:9876', clientlocal),
|
||||
('1.2.3.4:9876', clientremote),
|
||||
('*:9876', Address.as_server('', 9876)),
|
||||
('*', external),
|
||||
(':9876', Address.as_server('', 9876)),
|
||||
('localhost', Address('localhost', 1111)),
|
||||
(':', external),
|
||||
(dict(host='localhost'), Address('localhost', 1111)),
|
||||
("localhost:9876", clientlocal),
|
||||
("1.2.3.4:9876", clientremote),
|
||||
("*:9876", Address.as_server("", 9876)),
|
||||
("*", external),
|
||||
(":9876", Address.as_server("", 9876)),
|
||||
("localhost", Address("localhost", 1111)),
|
||||
(":", external),
|
||||
(dict(host="localhost"), Address("localhost", 1111)),
|
||||
(dict(port=9876), serverlocal),
|
||||
(dict(host=None, port=9876), serverlocal),
|
||||
(dict(host='localhost', port=9876), clientlocal),
|
||||
(dict(host='localhost', port='9876'), clientlocal),
|
||||
(dict(host="localhost", port=9876), clientlocal),
|
||||
(dict(host="localhost", port="9876"), clientlocal),
|
||||
]
|
||||
for value, expected in values:
|
||||
addr = Address.from_raw(value, defaultport=1111)
|
||||
assert addr == expected
|
||||
|
||||
@pytest.mark.parametrize('host', ['localhost', '127.0.0.1', '::', '1.2.3.4'])
|
||||
@pytest.mark.parametrize("host", ["localhost", "127.0.0.1", "::", "1.2.3.4"])
|
||||
def test_as_server_valid_address(self, host):
|
||||
addr = Address.as_server(host, 9786)
|
||||
assert addr == Address(host, 9786, isserver=True)
|
||||
|
||||
def test_as_server_public_host(self):
|
||||
addr = Address.as_server('', 9786)
|
||||
assert addr == Address('', 9786, isserver=True)
|
||||
addr = Address.as_server("", 9786)
|
||||
assert addr == Address("", 9786, isserver=True)
|
||||
|
||||
def test_as_server_default_host(self):
|
||||
addr = Address.as_server(None, 9786)
|
||||
assert addr == Address('localhost', 9786, isserver=True)
|
||||
assert addr == Address("localhost", 9786, isserver=True)
|
||||
|
||||
@pytest.mark.parametrize('host', [None, '', 'localhost', '1.2.3.4'])
|
||||
@pytest.mark.parametrize("host", [None, "", "localhost", "1.2.3.4"])
|
||||
def test_as_server_bad_port(self, host):
|
||||
port = None
|
||||
with pytest.raises(TypeError):
|
||||
Address.as_server(host, port)
|
||||
|
||||
@pytest.mark.parametrize('host', [None, '', 'localhost', '1.2.3.4'])
|
||||
@pytest.mark.parametrize('port', ['', -1, 65536])
|
||||
@pytest.mark.parametrize("host", [None, "", "localhost", "1.2.3.4"])
|
||||
@pytest.mark.parametrize("port", ["", -1, 65536])
|
||||
def test_as_server_bad_port2(self, host, port):
|
||||
with pytest.raises(ValueError):
|
||||
Address.as_server(host, port)
|
||||
|
||||
@pytest.mark.parametrize('host', ['localhost', '127.0.0.1', '::', '1.2.3.4'])
|
||||
@pytest.mark.parametrize("host", ["localhost", "127.0.0.1", "::", "1.2.3.4"])
|
||||
def test_as_client_valid_address(self, host):
|
||||
addr = Address.as_client(host, 9786)
|
||||
assert addr == Address(host, 9786, isserver=False)
|
||||
|
||||
def test_as_client_public_host(self):
|
||||
addr = Address.as_client('', 9786)
|
||||
assert addr == Address('', 9786, isserver=False)
|
||||
addr = Address.as_client("", 9786)
|
||||
assert addr == Address("", 9786, isserver=False)
|
||||
|
||||
def test_as_client_default_host(self):
|
||||
addr = Address.as_client(None, 9786)
|
||||
assert addr == Address('localhost', 9786, isserver=False)
|
||||
assert addr == Address("localhost", 9786, isserver=False)
|
||||
|
||||
@pytest.mark.parametrize('host', [None, '', 'localhost', '1.2.3.4'])
|
||||
@pytest.mark.parametrize("host", [None, "", "localhost", "1.2.3.4"])
|
||||
def test_as_client_bad_port(self, host):
|
||||
port = None
|
||||
with pytest.raises(TypeError):
|
||||
Address.as_client(host, port)
|
||||
|
||||
@pytest.mark.parametrize('host', [None, '', 'localhost', '1.2.3.4'])
|
||||
@pytest.mark.parametrize('port', ['', -1, 65536])
|
||||
@pytest.mark.parametrize("host", [None, "", "localhost", "1.2.3.4"])
|
||||
@pytest.mark.parametrize("port", ["", -1, 65536])
|
||||
def test_as_client_bad_port2(self, host, port):
|
||||
with pytest.raises(ValueError):
|
||||
Address.as_client(host, port)
|
||||
|
||||
@pytest.mark.parametrize('host', ['localhost', '127.0.0.1', '::', '1.2.3.4'])
|
||||
@pytest.mark.parametrize("host", ["localhost", "127.0.0.1", "::", "1.2.3.4"])
|
||||
def test_new_valid_address(self, host):
|
||||
addr = Address(host, 9786)
|
||||
assert addr == Address(host, 9786, isserver=False)
|
||||
|
||||
def test_new_public_host(self):
|
||||
addr = Address('', 9786)
|
||||
assert addr == Address('', 9786, isserver=True)
|
||||
addr = Address("", 9786)
|
||||
assert addr == Address("", 9786, isserver=True)
|
||||
|
||||
def test_new_default_host(self):
|
||||
addr = Address(None, 9786)
|
||||
assert addr == Address('localhost', 9786, isserver=True)
|
||||
assert addr == Address("localhost", 9786, isserver=True)
|
||||
|
||||
def test_new_wildcard_host(self):
|
||||
addr = Address('*', 9786)
|
||||
assert addr == Address('', 9786, isserver=True)
|
||||
addr = Address("*", 9786)
|
||||
assert addr == Address("", 9786, isserver=True)
|
||||
|
||||
@pytest.mark.parametrize('host', [None, '', 'localhost', '1.2.3.4'])
|
||||
@pytest.mark.parametrize("host", [None, "", "localhost", "1.2.3.4"])
|
||||
def test_new_bad_port(self, host):
|
||||
port = None
|
||||
with pytest.raises(TypeError):
|
||||
Address(host, port)
|
||||
|
||||
@pytest.mark.parametrize('host', [None, '', 'localhost', '1.2.3.4'])
|
||||
@pytest.mark.parametrize('port', ['', -1, 65536])
|
||||
@pytest.mark.parametrize("host", [None, "", "localhost", "1.2.3.4"])
|
||||
@pytest.mark.parametrize("port", ["", -1, 65536])
|
||||
def test_new_bad_port2(self, host, port):
|
||||
with pytest.raises(ValueError):
|
||||
Address(host, port)
|
||||
@@ -9,7 +9,7 @@ import pytest
from tests import debug


@pytest.mark.parametrize('run_as', ['file', 'module', 'code'])
@pytest.mark.parametrize("run_as", ["file", "module", "code"])
def test_args(pyfile, start_method, run_as):
    @pyfile
    def code_to_debug():
@@ -17,16 +17,14 @@ def test_args(pyfile, start_method, run_as):
import sys
|
||||
|
||||
print(sys.argv)
|
||||
assert sys.argv[1] == '--arg1'
|
||||
assert sys.argv[2] == 'arg2'
|
||||
assert sys.argv[3] == '-arg3'
|
||||
assert sys.argv[1] == "--arg1"
|
||||
assert sys.argv[2] == "arg2"
|
||||
assert sys.argv[3] == "-arg3"
|
||||
|
||||
args = ['--arg1', 'arg2', '-arg3']
|
||||
args = ["--arg1", "arg2", "-arg3"]
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
program_args=args
|
||||
target=(run_as, code_to_debug), start_method=start_method, program_args=args
|
||||
)
|
||||
session.start_debugging()
|
||||
|
||||
@@ -11,57 +11,59 @@ from tests.patterns import some
from tests.timeline import Event
|
||||
|
||||
|
||||
@pytest.mark.parametrize('wait_for_attach', ['waitOn', 'waitOff'])
|
||||
@pytest.mark.parametrize('is_attached', ['attachCheckOn', 'attachCheckOff'])
|
||||
@pytest.mark.parametrize('break_into', ['break', 'pause'])
|
||||
@pytest.mark.parametrize("wait_for_attach", ["waitOn", "waitOff"])
|
||||
@pytest.mark.parametrize("is_attached", ["attachCheckOn", "attachCheckOff"])
|
||||
@pytest.mark.parametrize("break_into", ["break", "pause"])
|
||||
def test_attach(run_as, wait_for_attach, is_attached, break_into):
|
||||
attach1_py = str(test_data / 'attach' / 'attach1.py')
|
||||
attach1_py = str(test_data / "attach" / "attach1.py")
|
||||
with debug.Session() as session:
|
||||
env = {
|
||||
'PTVSD_TEST_HOST': 'localhost',
|
||||
'PTVSD_TEST_PORT': str(session.ptvsd_port),
|
||||
"PTVSD_TEST_HOST": "localhost",
|
||||
"PTVSD_TEST_PORT": str(session.ptvsd_port),
|
||||
}
|
||||
if wait_for_attach == 'waitOn':
|
||||
env['PTVSD_WAIT_FOR_ATTACH'] = '1'
|
||||
if is_attached == 'attachCheckOn':
|
||||
env['PTVSD_IS_ATTACHED'] = '1'
|
||||
if break_into == 'break':
|
||||
env['PTVSD_BREAK_INTO_DBG'] = '1'
|
||||
if wait_for_attach == "waitOn":
|
||||
env["PTVSD_WAIT_FOR_ATTACH"] = "1"
|
||||
if is_attached == "attachCheckOn":
|
||||
env["PTVSD_IS_ATTACHED"] = "1"
|
||||
if break_into == "break":
|
||||
env["PTVSD_BREAK_INTO_DBG"] = "1"
|
||||
|
||||
session.initialize(
|
||||
target=(run_as, attach1_py),
|
||||
start_method='launch',
|
||||
start_method="launch",
|
||||
env=env,
|
||||
use_backchannel=True,
|
||||
)
|
||||
session.start_debugging()
|
||||
|
||||
if wait_for_attach == 'waitOn':
|
||||
assert session.read_json() == 'wait_for_attach'
|
||||
if wait_for_attach == "waitOn":
|
||||
assert session.read_json() == "wait_for_attach"
|
||||
|
||||
if is_attached == 'attachCheckOn':
|
||||
assert session.read_json() == 'is_attached'
|
||||
if is_attached == "attachCheckOn":
|
||||
assert session.read_json() == "is_attached"
|
||||
|
||||
if break_into == 'break':
|
||||
assert session.read_json() == 'break_into_debugger'
|
||||
if break_into == "break":
|
||||
assert session.read_json() == "break_into_debugger"
|
||||
hit = session.wait_for_thread_stopped()
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert 32 == frames[0]['line']
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert 32 == frames[0]["line"]
|
||||
else:
|
||||
# pause test
|
||||
session.write_json('pause_test')
|
||||
session.send_request('pause').wait_for_response(freeze=False)
|
||||
hit = session.wait_for_thread_stopped(reason='pause')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
session.write_json("pause_test")
|
||||
session.send_request("pause").wait_for_response(freeze=False)
|
||||
hit = session.wait_for_thread_stopped(reason="pause")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
# Note: no longer asserting line as it can even stop on different files
|
||||
# (such as as backchannel.py).
|
||||
# assert frames[0]['line'] in [27, 28, 29]
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('start_method', ['attach_socket_cmdline', 'attach_socket_import'])
|
||||
@pytest.mark.parametrize(
|
||||
"start_method", ["attach_socket_cmdline", "attach_socket_import"]
|
||||
)
|
||||
def test_reattach(pyfile, start_method, run_as):
|
||||
@pyfile
|
||||
def code_to_debug():
@@ -70,12 +72,12 @@ def test_reattach(pyfile, start_method, run_as):
import backchannel
|
||||
|
||||
ptvsd.break_into_debugger()
|
||||
print('first')
|
||||
backchannel.write_json('continued')
|
||||
print("first")
|
||||
backchannel.write_json("continued")
|
||||
for _ in range(0, 100):
|
||||
time.sleep(0.1)
|
||||
ptvsd.break_into_debugger()
|
||||
print('second')
|
||||
print("second")
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
@@ -87,51 +89,50 @@ def test_reattach(pyfile, start_method, run_as):
)
|
||||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped()
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert 7 == frames[0]['line']
|
||||
session.send_request('disconnect').wait_for_response(freeze=False)
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert 7 == frames[0]["line"]
|
||||
session.send_request("disconnect").wait_for_response(freeze=False)
|
||||
session.wait_for_disconnect()
|
||||
assert session.read_json() == 'continued'
|
||||
assert session.read_json() == "continued"
|
||||
|
||||
# re-attach
|
||||
with session.connect_with_new_session(
|
||||
target=(run_as, code_to_debug),
|
||||
) as session2:
|
||||
with session.connect_with_new_session(target=(run_as, code_to_debug)) as session2:
|
||||
session2.start_debugging()
|
||||
hit = session2.wait_for_thread_stopped()
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert 12 == frames[0]['line']
|
||||
session2.send_request('disconnect').wait_for_response(freeze=False)
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert 12 == frames[0]["line"]
|
||||
session2.send_request("disconnect").wait_for_response(freeze=False)
|
||||
session2.wait_for_disconnect()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('run_as', ['file', 'module', 'code'])
|
||||
@pytest.mark.skip(reason='Enable after #846, #863 and #1144 are fixed')
|
||||
def test_attaching_by_pid(pyfile, run_as):
|
||||
@pytest.mark.parametrize("start_method", ["attach_pid"])
|
||||
@pytest.mark.parametrize("run_as", ["file", "module", "code"])
|
||||
@pytest.mark.skip(reason="Enable after #846, #863 and #1144 are fixed")
|
||||
def test_attaching_by_pid(pyfile, run_as, start_method):
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
import debug_me # noqa
|
||||
import time
|
||||
|
||||
def do_something(i):
|
||||
time.sleep(0.1)
|
||||
print(i)
|
||||
print(i) # @break
|
||||
|
||||
for i in range(100):
|
||||
do_something(i)
|
||||
|
||||
bp_line = 5
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method='attach_pid',
|
||||
)
|
||||
session.set_breakpoints(code_to_debug, [bp_line])
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
session.set_breakpoints(code_to_debug, [code_to_debug.lines["break"]])
|
||||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped()
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert bp_line == frames[0]['line']
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert code_to_debug.lines["break"] == frames[0]["line"]
|
||||
|
||||
# remove breakpoint and continue
|
||||
session.set_breakpoints(code_to_debug, [])
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.wait_for_next(Event('output', ANY.dict_with({'category': 'stdout'})))
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_next(
|
||||
Event("output", some.dict.containing({"category": "stdout"}))
|
||||
)
|
||||
session.wait_for_exit()
|
||||
@@ -9,55 +9,49 @@ import pytest
from tests import debug
|
||||
|
||||
|
||||
@pytest.mark.parametrize('run_as', ['file', 'module', 'code'])
|
||||
@pytest.mark.parametrize("run_as", ["file", "module", "code"])
|
||||
def test_with_wait_for_attach(pyfile, start_method, run_as):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
# NOTE: These tests verify break_into_debugger for launch
|
||||
# and attach cases. For attach this is always after wait_for_attach
|
||||
from debug_me import ptvsd
|
||||
|
||||
ptvsd.break_into_debugger()
|
||||
print('break here')
|
||||
print("break here")
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped()
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert frames[0]['line'] == 7
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert frames[0]["line"] == 7
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('run_as', ['file', 'module', 'code'])
|
||||
@pytest.mark.skip(reason='https://github.com/microsoft/ptvsd/issues/1505')
|
||||
@pytest.mark.parametrize("run_as", ["file", "module", "code"])
|
||||
@pytest.mark.skip(reason="https://github.com/microsoft/ptvsd/issues/1505")
|
||||
def test_breakpoint_function(pyfile, start_method, run_as):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
# NOTE: These tests verify break_into_debugger for launch
|
||||
# and attach cases. For attach this is always after wait_for_attach
|
||||
import debug_me # noqa
|
||||
|
||||
# TODO: use ptvsd.break_into_debugger() on <3.7
|
||||
breakpoint() # noqa
|
||||
print('break here')
|
||||
breakpoint() # noqa
|
||||
print("break here")
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped()
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
path = frames[0]['source']['path']
|
||||
assert path.endswith('code_to_debug.py') or path.endswith('<string>')
|
||||
assert frames[0]['line'] == 6
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
path = frames[0]["source"]["path"]
|
||||
assert path.endswith("code_to_debug.py") or path.endswith("<string>")
|
||||
assert frames[0]["line"] == 6
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
@@ -20,7 +20,7 @@ BP_TEST_ROOT = test_data / "bp"


def test_path_with_ampersand(start_method, run_as):
    test_py = str(BP_TEST_ROOT / 'a&b' / 'test.py')
    test_py = str(BP_TEST_ROOT / "a&b" / "test.py")
    lines = code.get_marked_line_numbers(test_py)

    with debug.Session(start_method) as session:
@@ -28,101 +28,110 @@ def test_path_with_ampersand(start_method, run_as):
session.set_breakpoints(test_py, [lines["two"]])
|
||||
session.start_debugging()
|
||||
|
||||
session.wait_for_stop('breakpoint', expected_frames=[
|
||||
ANY.dict_with({"source": ANY.source(test_py)}),
|
||||
])
|
||||
session.wait_for_stop(
|
||||
"breakpoint",
|
||||
expected_frames=[some.dict.containing({"source": some.source(test_py)})],
|
||||
)
|
||||
|
||||
session.request_continue()
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.version_info < (3, 0), reason='Paths are not Unicode in Python 2.7')
|
||||
@pytest.mark.skipif(
|
||||
platform.system() == 'Windows' and sys.version_info < (3, 6),
|
||||
reason='https://github.com/Microsoft/ptvsd/issues/1124#issuecomment-459506802')
|
||||
sys.version_info < (3, 0), reason="Paths are not Unicode in Python 2.7"
|
||||
)
|
||||
@pytest.mark.skipif(
|
||||
platform.system() == "Windows" and sys.version_info < (3, 6),
|
||||
reason="https://github.com/Microsoft/ptvsd/issues/1124#issuecomment-459506802",
|
||||
)
|
||||
def test_path_with_unicode(start_method, run_as):
|
||||
bp_line = 6
|
||||
testfile = os.path.join(BP_TEST_ROOT, u'ನನ್ನ_ಸ್ಕ್ರಿಪ್ಟ್.py')
|
||||
test_py = os.path.join(BP_TEST_ROOT, "ನನ್ನ_ಸ್ಕ್ರಿಪ್ಟ್.py")
|
||||
lines = code.get_marked_line_numbers(test_py)
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, testfile),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.set_breakpoints(testfile, [bp_line])
|
||||
session.initialize(target=(run_as, test_py), start_method=start_method)
|
||||
session.set_breakpoints(test_py, [lines["bp"]])
|
||||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped('breakpoint')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert frames[0]['source']['path'] == Path(testfile)
|
||||
assert u'ಏನಾದರೂ_ಮಾಡು' == frames[0]['name']
|
||||
hit = session.wait_for_thread_stopped("breakpoint")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert frames[0]["source"]["path"] == some.path(test_py)
|
||||
assert "ಏನಾದರೂ_ಮಾಡು" == frames[0]["name"]
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('condition_key', [
|
||||
'condition_var',
|
||||
'hitCondition_#',
|
||||
'hitCondition_eq',
|
||||
'hitCondition_gt',
|
||||
'hitCondition_ge',
|
||||
'hitCondition_lt',
|
||||
'hitCondition_le',
|
||||
'hitCondition_mod',
|
||||
])
|
||||
@pytest.mark.parametrize(
|
||||
"condition_key",
|
||||
[
|
||||
"condition_var",
|
||||
"hitCondition_#",
|
||||
"hitCondition_eq",
|
||||
"hitCondition_gt",
|
||||
"hitCondition_ge",
|
||||
"hitCondition_lt",
|
||||
"hitCondition_le",
|
||||
"hitCondition_mod",
|
||||
],
|
||||
)
|
||||
def test_conditional_breakpoint(pyfile, start_method, run_as, condition_key):
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
import debug_me # noqa
|
||||
|
||||
for i in range(0, 10):
|
||||
print(i)
|
||||
print(i) # @bp
|
||||
|
||||
expected = {
|
||||
'condition_var': ('condition', 'i==5', '5', 1),
|
||||
'hitCondition_#': ('hitCondition', '5', '4', 1),
|
||||
'hitCondition_eq': ('hitCondition', '==5', '4', 1),
|
||||
'hitCondition_gt': ('hitCondition', '>5', '5', 5),
|
||||
'hitCondition_ge': ('hitCondition', '>=5', '4', 6),
|
||||
'hitCondition_lt': ('hitCondition', '<5', '0', 4),
|
||||
'hitCondition_le': ('hitCondition', '<=5', '0', 5),
|
||||
'hitCondition_mod': ('hitCondition', '%3', '2', 3),
|
||||
"condition_var": ("condition", "i==5", "5", 1),
|
||||
"hitCondition_#": ("hitCondition", "5", "4", 1),
|
||||
"hitCondition_eq": ("hitCondition", "==5", "4", 1),
|
||||
"hitCondition_gt": ("hitCondition", ">5", "5", 5),
|
||||
"hitCondition_ge": ("hitCondition", ">=5", "4", 6),
|
||||
"hitCondition_lt": ("hitCondition", "<5", "0", 4),
|
||||
"hitCondition_le": ("hitCondition", "<=5", "0", 5),
|
||||
"hitCondition_mod": ("hitCondition", "%3", "2", 3),
|
||||
}
|
||||
condition_type, condition, value, hits = expected[condition_key]
|
||||
|
||||
bp_line = 4
|
||||
lines = code_to_debug.lines
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.send_request('setBreakpoints', arguments={
|
||||
'source': {'path': code_to_debug},
|
||||
'breakpoints': [{'line': bp_line, condition_type: condition}],
|
||||
}).wait_for_response()
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
session.send_request(
|
||||
"setBreakpoints",
|
||||
arguments={
|
||||
"source": {"path": code_to_debug},
|
||||
"breakpoints": [{"line": lines["bp"], condition_type: condition}],
|
||||
},
|
||||
).wait_for_response()
|
||||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped()
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert bp_line == frames[0]['line']
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert lines["bp"] == frames[0]["line"]
|
||||
|
||||
resp_scopes = session.send_request('scopes', arguments={
|
||||
'frameId': hit.frame_id
|
||||
}).wait_for_response()
|
||||
scopes = resp_scopes.body['scopes']
|
||||
resp_scopes = session.send_request(
|
||||
"scopes", arguments={"frameId": hit.frame_id}
|
||||
).wait_for_response()
|
||||
scopes = resp_scopes.body["scopes"]
|
||||
assert len(scopes) > 0
|
||||
|
||||
resp_variables = session.send_request('variables', arguments={
|
||||
'variablesReference': scopes[0]['variablesReference']
|
||||
}).wait_for_response()
|
||||
variables = list(v for v in resp_variables.body['variables']
|
||||
if v['name'] == 'i')
|
||||
resp_variables = session.send_request(
|
||||
"variables",
|
||||
arguments={"variablesReference": scopes[0]["variablesReference"]},
|
||||
).wait_for_response()
|
||||
variables = list(
|
||||
v for v in resp_variables.body["variables"] if v["name"] == "i"
|
||||
)
|
||||
assert variables == [
|
||||
ANY.dict_with({'name': 'i', 'type': 'int', 'value': value, 'evaluateName': 'i'})
|
||||
some.dict.containing(
|
||||
{"name": "i", "type": "int", "value": value, "evaluateName": "i"}
|
||||
)
|
||||
]
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
for i in range(1, hits):
|
||||
session.wait_for_thread_stopped()
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
||||
@@ -130,125 +139,122 @@ def test_crossfile_breakpoint(pyfile, start_method, run_as):
@pyfile
|
||||
def script1():
|
||||
import debug_me # noqa
|
||||
|
||||
def do_something():
|
||||
print('do something')
|
||||
print("do something") # @bp
|
||||
|
||||
@pyfile
|
||||
def script2():
|
||||
import debug_me # noqa
|
||||
import script1
|
||||
script1.do_something()
|
||||
print('Done')
|
||||
|
||||
bp_script1_line = 3
|
||||
bp_script2_line = 4
|
||||
script1.do_something() # @bp
|
||||
print("Done")
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, script2),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.set_breakpoints(script1, lines=[bp_script1_line])
|
||||
session.set_breakpoints(script2, lines=[bp_script2_line])
|
||||
session.initialize(target=(run_as, script2), start_method=start_method)
|
||||
session.set_breakpoints(script1, lines=[script1.lines["bp"]])
|
||||
session.set_breakpoints(script2, lines=[script2.lines["bp"]])
|
||||
session.start_debugging()
|
||||
|
||||
hit = session.wait_for_thread_stopped()
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert bp_script2_line == frames[0]['line']
|
||||
assert frames[0]['source']['path'] == Path(script2)
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert script2.lines["bp"] == frames[0]["line"]
|
||||
assert frames[0]["source"]["path"] == some.path(script2)
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
hit = session.wait_for_thread_stopped()
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert bp_script1_line == frames[0]['line']
|
||||
assert frames[0]['source']['path'] == Path(script1)
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert script1.lines["bp"] == frames[0]["line"]
|
||||
assert frames[0]["source"]["path"] == some.path(script1)
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('error_name', [
|
||||
'NameError',
|
||||
'OtherError',
|
||||
])
|
||||
@pytest.mark.parametrize("error_name", ["NameError", "OtherError"])
|
||||
def test_error_in_condition(pyfile, start_method, run_as, error_name):
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
import debug_me # noqa
|
||||
|
||||
def do_something_bad():
|
||||
raise ArithmeticError()
|
||||
for i in range(1, 10):
|
||||
|
||||
for i in range(1, 10): # @bp
|
||||
pass
|
||||
|
||||
# NOTE: NameError in condition, is a special case. Pydevd is configured to skip
|
||||
# traceback for name errors. See https://github.com/Microsoft/ptvsd/issues/853
|
||||
# for more details. For all other errors we should be printing traceback.
|
||||
condition = {
|
||||
'NameError': ('x==5'), # 'x' does not exist in the debuggee
|
||||
'OtherError': ('do_something_bad()==5') # throws some error
|
||||
"NameError": ("x==5"), # 'x' does not exist in the debuggee
|
||||
"OtherError": ("do_something_bad()==5"), # throws some error
|
||||
}
|
||||
|
||||
bp_line = 5
|
||||
lines = code_to_debug.lines
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.send_request('setBreakpoints', arguments={
|
||||
'source': {'path': code_to_debug},
|
||||
'breakpoints': [{
|
||||
'line': bp_line,
|
||||
'condition': condition[error_name],
|
||||
}],
|
||||
}).wait_for_response()
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
session.send_request(
|
||||
"setBreakpoints",
|
||||
arguments={
|
||||
"source": {"path": code_to_debug},
|
||||
"breakpoints": [
|
||||
{"line": lines["bp"], "condition": condition[error_name]}
|
||||
],
|
||||
},
|
||||
).wait_for_response()
|
||||
session.start_debugging()
|
||||
|
||||
session.wait_for_exit()
|
||||
assert session.get_stdout_as_string() == b''
|
||||
if error_name == 'NameError':
|
||||
assert session.get_stderr_as_string().find(b'NameError') == -1
|
||||
assert session.get_stdout_as_string() == b""
|
||||
if error_name == "NameError":
|
||||
assert session.get_stderr_as_string().find(b"NameError") == -1
|
||||
else:
|
||||
assert session.get_stderr_as_string().find(b'ArithmeticError') > 0
|
||||
assert session.get_stderr_as_string().find(b"ArithmeticError") > 0
|
||||
|
||||
|
||||
def test_log_point(pyfile, start_method, run_as):
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
import debug_me # noqa
|
||||
|
||||
a = 10
|
||||
for i in range(1, a):
|
||||
print('value: %d' % i)
|
||||
print("value: %d" % i) # @bp
|
||||
# Break at end too so that we're sure we get all output
|
||||
# events before the break.
|
||||
a = 10
|
||||
a = 10 # @end
|
||||
|
||||
bp_line = 5
|
||||
end_bp_line = 8
|
||||
lines = code_to_debug.lines
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.send_request('setBreakpoints', arguments={
|
||||
'source': {'path': code_to_debug},
|
||||
'breakpoints': [{
|
||||
'line': bp_line,
|
||||
'logMessage': 'log: {a + i}'
|
||||
}, {'line': end_bp_line}],
|
||||
}).wait_for_response()
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
session.send_request(
|
||||
"setBreakpoints",
|
||||
arguments={
|
||||
"source": {"path": code_to_debug},
|
||||
"breakpoints": [
|
||||
{"line": lines["bp"], "logMessage": "log: {a + i}"},
|
||||
{"line": lines["end"]},
|
||||
],
|
||||
},
|
||||
).wait_for_response()
|
||||
session.start_debugging()
|
||||
|
||||
# Breakpoint at the end just to make sure we get all output events.
|
||||
hit = session.wait_for_thread_stopped()
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert end_bp_line == frames[0]['line']
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert lines["end"] == frames[0]["line"]
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
session.wait_for_exit()
|
||||
assert session.get_stderr_as_string() == b''
|
||||
assert session.get_stderr_as_string() == b""
|
||||
|
||||
output = session.all_occurrences_of(Event('output', ANY.dict_with({'category': 'stdout'})))
|
||||
output_str = ''.join(o.body['output'] for o in output)
|
||||
output = session.all_occurrences_of(
|
||||
Event("output", some.dict.containing({"category": "stdout"}))
|
||||
)
|
||||
output_str = "".join(o.body["output"] for o in output)
|
||||
logged = sorted(int(i) for i in re.findall(r"log:\s([0-9]*)", output_str))
|
||||
values = sorted(int(i) for i in re.findall(r"value:\s([0-9]*)", output_str))
@ -260,63 +266,70 @@ def test_condition_with_log_point(pyfile, start_method, run_as):
|
|||
@pyfile
|
||||
def code_to_debug():
|
||||
import debug_me # noqa
|
||||
|
||||
a = 10
|
||||
for i in range(1, a):
|
||||
print('value: %d' % i)
|
||||
print("value: %d" % i) # @bp
|
||||
# Break at end too so that we're sure we get all output
|
||||
# events before the break.
|
||||
a = 10
|
||||
a = 10 # @end
|
||||
|
||||
bp_line = 5
|
||||
end_bp_line = 8
|
||||
lines = code_to_debug.lines
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.send_request('setBreakpoints', arguments={
|
||||
'source': {'path': code_to_debug},
|
||||
'breakpoints': [{
|
||||
'line': bp_line,
|
||||
'logMessage': 'log: {a + i}',
|
||||
'condition': 'i==5'
|
||||
}, {'line': end_bp_line}],
|
||||
}).wait_for_response()
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
session.send_request(
|
||||
"setBreakpoints",
|
||||
arguments={
|
||||
"source": {"path": code_to_debug},
|
||||
"breakpoints": [
|
||||
{
|
||||
"line": lines["bp"],
|
||||
"logMessage": "log: {a + i}",
|
||||
"condition": "i==5",
|
||||
},
|
||||
{"line": lines["end"]},
|
||||
],
|
||||
},
|
||||
).wait_for_response()
|
||||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped()
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert bp_line == frames[0]['line']
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert lines["end"] == frames[0]["line"]
|
||||
|
||||
resp_scopes = session.send_request('scopes', arguments={
|
||||
'frameId': hit.frame_id
|
||||
}).wait_for_response()
|
||||
scopes = resp_scopes.body['scopes']
|
||||
resp_scopes = session.send_request(
|
||||
"scopes", arguments={"frameId": hit.frame_id}
|
||||
).wait_for_response()
|
||||
scopes = resp_scopes.body["scopes"]
|
||||
assert len(scopes) > 0
|
||||
|
||||
resp_variables = session.send_request('variables', arguments={
|
||||
'variablesReference': scopes[0]['variablesReference']
|
||||
}).wait_for_response()
|
||||
resp_variables = session.send_request(
|
||||
"variables",
|
||||
arguments={"variablesReference": scopes[0]["variablesReference"]},
|
||||
).wait_for_response()
|
||||
variables = list(
|
||||
v for v in resp_variables.body['variables']
|
||||
if v['name'] == 'i'
|
||||
v for v in resp_variables.body["variables"] if v["name"] == "i"
|
||||
)
|
||||
assert variables == [
|
||||
ANY.dict_with({'name': 'i', 'type': 'int', 'value': '5', 'evaluateName': 'i'})
|
||||
some.dict.containing(
|
||||
{"name": "i", "type": "int", "value": "5", "evaluateName": "i"}
|
||||
)
|
||||
]
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
# Breakpoint at the end just to make sure we get all output events.
|
||||
hit = session.wait_for_thread_stopped()
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert end_bp_line == frames[0]['line']
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert lines["end"] == frames[0]["line"]
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
session.wait_for_exit()
|
||||
assert session.get_stderr_as_string() == b''
|
||||
assert session.get_stderr_as_string() == b""
|
||||
|
||||
output = session.all_occurrences_of(Event('output', ANY.dict_with({'category': 'stdout'})))
|
||||
output_str = ''.join(o.body['output'] for o in output)
|
||||
output = session.all_occurrences_of(
|
||||
Event("output", some.dict.containing({"category": "stdout"}))
|
||||
)
|
||||
output_str = "".join(o.body["output"] for o in output)
|
||||
logged = sorted(int(i) for i in re.findall(r"log:\s([0-9]*)", output_str))
|
||||
values = sorted(int(i) for i in re.findall(r"value:\s([0-9]*)", output_str))
|
||||
|
||||
|
|
@ -325,24 +338,20 @@ def test_condition_with_log_point(pyfile, start_method, run_as):

def test_package_launch():
bp_line = 2
cwd = get_test_root('testpkgs')
testfile = os.path.join(cwd, 'pkg1', '__main__.py')
cwd = test_data / "testpkgs"
test_py = os.path.join(cwd, "pkg1", "__main__.py")
lines = code.get_marked_line_numbers(test_py)

with debug.Session() as session:
session.initialize(
target=('module', 'pkg1'),
start_method='launch',
cwd=cwd,
)
session.set_breakpoints(testfile, [bp_line])
session.initialize(target=("module", "pkg1"), start_method="launch", cwd=cwd)
session.set_breakpoints(test_py, [lines["two"]])
session.start_debugging()

hit = session.wait_for_thread_stopped()
frames = hit.stacktrace.body['stackFrames']
assert bp_line == frames[0]['line']
frames = hit.stacktrace.body["stackFrames"]
assert lines["two"] == frames[0]["line"]

session.send_request('continue').wait_for_response(freeze=False)
session.send_request("continue").wait_for_response(freeze=False)
session.wait_for_exit()
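Note: the tests in this commit replace hard-coded breakpoint line numbers (bp_line = ...) with '# @name' markers in the debugged script, looked up via code.get_marked_line_numbers(...) or the lines attribute of a @pyfile script. A minimal sketch of such a marker scanner (an assumption about how the helper behaves, not the actual tests/code.py implementation):

import re

_MARKER = re.compile(r"#\s*@\s*([\w-]+)\s*$")

def get_marked_line_numbers(path):
    # Maps marker names from trailing '# @name' comments to 1-based line numbers.
    markers = {}
    with open(path) as f:
        for lineno, line in enumerate(f, start=1):
            match = _MARKER.search(line.rstrip())
            if match:
                markers[match.group(1)] = lineno
    return markers

Under this sketch, a line such as 'print("done")  # @bp' yields {"bp": <its line number>}, which is what expressions like code_to_debug.lines["bp"] rely on.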
@ -350,34 +359,43 @@ def test_add_and_remove_breakpoint(pyfile, start_method, run_as):
|
|||
@pyfile
|
||||
def code_to_debug():
|
||||
from debug_me import backchannel
|
||||
|
||||
for i in range(0, 10):
|
||||
print(i)
|
||||
print(i) # @bp
|
||||
backchannel.read_json()
|
||||
|
||||
bp_line = 4
|
||||
lines = code_to_debug.lines
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
use_backchannel=True,
|
||||
)
|
||||
session.set_breakpoints(code_to_debug, [bp_line])
|
||||
session.set_breakpoints(code_to_debug, [lines["bp"]])
|
||||
session.start_debugging()
|
||||
|
||||
hit = session.wait_for_thread_stopped()
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert bp_line == frames[0]['line']
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert lines["bp"] == frames[0]["line"]
|
||||
|
||||
# remove breakpoints in file
|
||||
session.set_breakpoints(code_to_debug, [])
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
session.wait_for_next(Event('output', ANY.dict_with({'category': 'stdout', 'output': '9'})))
|
||||
session.write_json('done')
|
||||
session.wait_for_next(
|
||||
Event("output", some.dict.containing({"category": "stdout", "output": "9"}))
|
||||
)
|
||||
session.write_json("done")
|
||||
session.wait_for_exit()
|
||||
|
||||
output = session.all_occurrences_of(Event('output', ANY.dict_with({'category': 'stdout'})))
|
||||
output = sorted(int(o.body['output'].strip()) for o in output if len(o.body['output'].strip()) > 0)
|
||||
output = session.all_occurrences_of(
|
||||
Event("output", some.dict.containing({"category": "stdout"}))
|
||||
)
|
||||
output = sorted(
|
||||
int(o.body["output"].strip())
|
||||
for o in output
|
||||
if len(o.body["output"].strip()) > 0
|
||||
)
|
||||
assert list(range(0, 10)) == output
|
||||
|
||||
|
||||
|
|
@ -387,54 +405,44 @@ def test_invalid_breakpoints(pyfile, start_method, run_as):
|
|||
import debug_me # noqa
|
||||
|
||||
b = True
|
||||
while b: #@bp1-expected
|
||||
pass #@bp1-requested
|
||||
while b: # @bp1-expected
|
||||
pass # @bp1-requested
|
||||
break
|
||||
|
||||
print() #@bp2-expected
|
||||
[ #@bp2-requested
|
||||
1, 2, 3, #@bp3-expected
|
||||
] #@bp3-requested
|
||||
print() # @bp2-expected
|
||||
[1, 2, 3] # @bp2-requested # @bp3-expected # @bp3-requested
|
||||
|
||||
# Python 2.7 only.
|
||||
print() #@bp4-expected
|
||||
print(1, #@bp4-requested-1
|
||||
2, 3, #@bp4-requested-2
|
||||
4, 5, 6)
|
||||
print() # @bp4-expected
|
||||
print(1, 2, 3, 4, 5, 6) # @bp4-requested-1 # @bp4-requested-2
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
line_numbers = code_to_debug.lines
|
||||
print(line_numbers)
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
|
||||
requested_bps = [
|
||||
line_numbers['bp1-requested'],
|
||||
line_numbers['bp2-requested'],
|
||||
line_numbers['bp3-requested'],
|
||||
line_numbers["bp1-requested"],
|
||||
line_numbers["bp2-requested"],
|
||||
line_numbers["bp3-requested"],
|
||||
]
|
||||
if sys.version_info < (3,):
|
||||
requested_bps += [
|
||||
line_numbers['bp4-requested-1'],
|
||||
line_numbers['bp4-requested-2'],
|
||||
line_numbers["bp4-requested-1"],
|
||||
line_numbers["bp4-requested-2"],
|
||||
]
|
||||
|
||||
actual_bps = session.set_breakpoints(code_to_debug, requested_bps)
|
||||
actual_bps = [bp['line'] for bp in actual_bps]
|
||||
actual_bps = [bp["line"] for bp in actual_bps]
|
||||
|
||||
expected_bps = [
|
||||
line_numbers['bp1-expected'],
|
||||
line_numbers['bp2-expected'],
|
||||
line_numbers['bp3-expected'],
|
||||
line_numbers["bp1-expected"],
|
||||
line_numbers["bp2-expected"],
|
||||
line_numbers["bp3-expected"],
|
||||
]
|
||||
if sys.version_info < (3,):
|
||||
expected_bps += [
|
||||
line_numbers['bp4-expected'],
|
||||
line_numbers['bp4-expected'],
|
||||
]
|
||||
expected_bps += [line_numbers["bp4-expected"], line_numbers["bp4-expected"]]
|
||||
|
||||
assert expected_bps == actual_bps
|
||||
|
||||
|
|
@ -449,11 +457,11 @@ def test_invalid_breakpoints(pyfile, start_method, run_as):
|
|||
|
||||
while expected_bps:
|
||||
hit = session.wait_for_thread_stopped()
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
line = frames[0]['line']
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
line = frames[0]["line"]
|
||||
assert line == expected_bps[0]
|
||||
del expected_bps[0]
|
||||
session.send_request('continue').wait_for_response()
|
||||
session.send_request("continue").wait_for_response()
|
||||
assert not expected_bps
|
||||
|
||||
session.wait_for_exit()
|
||||
|
|
@ -466,43 +474,40 @@ def test_deep_stacks(pyfile, start_method, run_as):
|
|||
|
||||
def deep_stack(level):
|
||||
if level <= 0:
|
||||
print('done') #@bp
|
||||
print("done") # @bp
|
||||
return level
|
||||
deep_stack(level - 1)
|
||||
|
||||
deep_stack(100)
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
|
||||
bp_line = line_numbers['bp']
|
||||
|
||||
actual_bps = session.set_breakpoints(code_to_debug, [bp_line])
|
||||
actual_bps = [bp['line'] for bp in actual_bps]
|
||||
actual_bps = session.set_breakpoints(code_to_debug, [code_to_debug.lines["bp"]])
|
||||
actual_bps = [bp["line"] for bp in actual_bps]
|
||||
session.start_debugging()
|
||||
|
||||
hit = session.wait_for_thread_stopped()
|
||||
full_frames = hit.stacktrace.body['stackFrames']
|
||||
full_frames = hit.stacktrace.body["stackFrames"]
|
||||
assert len(full_frames) > 100
|
||||
|
||||
# Construct stack from parts
|
||||
frames = []
|
||||
start = 0
|
||||
for _ in range(5):
|
||||
resp_stacktrace = session.send_request('stackTrace', arguments={
|
||||
'threadId': hit.thread_id,
|
||||
'startFrame': start,
|
||||
'levels': 25
|
||||
}).wait_for_response()
|
||||
assert resp_stacktrace.body['totalFrames'] > 0
|
||||
frames += resp_stacktrace.body['stackFrames']
|
||||
resp_stacktrace = session.send_request(
|
||||
"stackTrace",
|
||||
arguments={
|
||||
"threadId": hit.thread_id,
|
||||
"startFrame": start,
|
||||
"levels": 25,
|
||||
},
|
||||
).wait_for_response()
|
||||
assert resp_stacktrace.body["totalFrames"] > 0
|
||||
frames += resp_stacktrace.body["stackFrames"]
|
||||
start = len(frames)
|
||||
|
||||
assert full_frames == frames
|
||||
|
||||
session.send_request('continue').wait_for_response()
|
||||
session.send_request("continue").wait_for_response()
|
||||
session.wait_for_exit()
@ -13,32 +13,31 @@ from tests.timeline import Event
|
|||
|
||||
|
||||
expected_at_line = {
|
||||
'in_do_something': [
|
||||
{'label': 'SomeClass', 'type': 'class', 'start': 0, 'length': 4},
|
||||
{'label': 'someFunction', 'type': 'function', 'start': 0, 'length': 4},
|
||||
{'label': 'someVariable', 'type': 'field', 'start': 0, 'length': 4},
|
||||
"in_do_something": [
|
||||
{"label": "SomeClass", "type": "class", "start": 0, "length": 4},
|
||||
{"label": "someFunction", "type": "function", "start": 0, "length": 4},
|
||||
{"label": "someVariable", "type": "field", "start": 0, "length": 4},
|
||||
],
|
||||
'in_some_function': [
|
||||
{'label': 'SomeClass', 'type': 'class', 'start': 0, 'length': 4},
|
||||
{'label': 'someFunction', 'type': 'function', 'start': 0, 'length': 4},
|
||||
{'label': 'someVar', 'type': 'field', 'start': 0, 'length': 4},
|
||||
{'label': 'someVariable', 'type': 'field', 'start': 0, 'length': 4},
|
||||
"in_some_function": [
|
||||
{"label": "SomeClass", "type": "class", "start": 0, "length": 4},
|
||||
{"label": "someFunction", "type": "function", "start": 0, "length": 4},
|
||||
{"label": "someVar", "type": "field", "start": 0, "length": 4},
|
||||
{"label": "someVariable", "type": "field", "start": 0, "length": 4},
|
||||
],
|
||||
'done': [
|
||||
{'label': 'SomeClass', 'type': 'class', 'start': 0, 'length': 4},
|
||||
{'label': 'someFunction', 'type': 'function', 'start': 0, 'length': 4},
|
||||
"done": [
|
||||
{"label": "SomeClass", "type": "class", "start": 0, "length": 4},
|
||||
{"label": "someFunction", "type": "function", "start": 0, "length": 4},
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize('bp_line', sorted(expected_at_line.keys()))
|
||||
def test_completions_scope(pyfile, bp_line, start_method, run_as):
|
||||
@pytest.mark.parametrize("bp_label", sorted(expected_at_line.keys()))
|
||||
def test_completions_scope(pyfile, bp_label, start_method, run_as):
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
import debug_me # noqa
|
||||
|
||||
class SomeClass():
|
||||
|
||||
class SomeClass:
|
||||
def __init__(self, someVar):
|
||||
self.some_var = someVar
|
||||
|
||||
|
|
@ -50,45 +49,44 @@ def test_completions_scope(pyfile, bp_line, start_method, run_as):
|
|||
someVariable = someVar
|
||||
return SomeClass(someVariable).do_someting() # @in_some_function
|
||||
|
||||
someFunction('value')
|
||||
print('done') # @done
|
||||
someFunction("value")
|
||||
print("done") # @done
|
||||
|
||||
expected = expected_at_line[bp_line]
|
||||
expected = expected_at_line[bp_label]
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
ignore_unobserved=[Event('stopped')],
|
||||
ignore_unobserved=[Event("stopped")],
|
||||
)
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
session.set_breakpoints(code_to_debug, [line_numbers[bp_line]])
|
||||
session.set_breakpoints(code_to_debug, [code_to_debug.lines[bp_label]])
|
||||
session.start_debugging()
|
||||
|
||||
thread_stopped = session.wait_for_next(Event('stopped', ANY.dict_with({'reason': 'breakpoint'})))
|
||||
assert thread_stopped.body['threadId'] is not None
|
||||
tid = thread_stopped.body['threadId']
|
||||
thread_stopped = session.wait_for_next(
|
||||
Event("stopped", some.dict.containing({"reason": "breakpoint"}))
|
||||
)
|
||||
assert thread_stopped.body["threadId"] is not None
|
||||
tid = thread_stopped.body["threadId"]
|
||||
|
||||
resp_stacktrace = session.send_request('stackTrace', arguments={
|
||||
'threadId': tid,
|
||||
}).wait_for_response()
|
||||
assert resp_stacktrace.body['totalFrames'] > 0
|
||||
frames = resp_stacktrace.body['stackFrames']
|
||||
resp_stacktrace = session.send_request(
|
||||
"stackTrace", arguments={"threadId": tid}
|
||||
).wait_for_response()
|
||||
assert resp_stacktrace.body["totalFrames"] > 0
|
||||
frames = resp_stacktrace.body["stackFrames"]
|
||||
assert len(frames) > 0
|
||||
|
||||
fid = frames[0]['id']
|
||||
resp_completions = session.send_request('completions', arguments={
|
||||
'text': 'some',
|
||||
'frameId': fid,
|
||||
'column': 5,
|
||||
}).wait_for_response()
|
||||
targets = resp_completions.body['targets']
|
||||
fid = frames[0]["id"]
|
||||
resp_completions = session.send_request(
|
||||
"completions", arguments={"text": "some", "frameId": fid, "column": 5}
|
||||
).wait_for_response()
|
||||
targets = resp_completions.body["targets"]
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
targets.sort(key=lambda t: t['label'])
|
||||
expected.sort(key=lambda t: t['label'])
|
||||
targets.sort(key=lambda t: t["label"])
|
||||
expected.sort(key=lambda t: t["label"])
|
||||
assert targets == expected
|
||||
|
||||
session.wait_for_exit()
|
||||
|
|
@ -98,58 +96,59 @@ def test_completions_cases(pyfile, start_method, run_as):
|
|||
@pyfile
|
||||
def code_to_debug():
|
||||
import debug_me # noqa
|
||||
|
||||
a = 1
|
||||
b = {"one": 1, "two": 2}
|
||||
c = 3
|
||||
print([a, b, c]) # @break
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
bp_line = line_numbers['break']
|
||||
bp_file = code_to_debug
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, bp_file),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.set_breakpoints(bp_file, [bp_line])
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
session.set_breakpoints(code_to_debug, [code_to_debug.lines["break"]])
|
||||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped()
|
||||
|
||||
response = session.send_request('completions', arguments={
|
||||
'frameId': hit.frame_id,
|
||||
'text': 'b.',
|
||||
'column': 3,
|
||||
}).wait_for_response()
|
||||
response = session.send_request(
|
||||
"completions",
|
||||
arguments={"frameId": hit.frame_id, "text": "b.", "column": 3},
|
||||
).wait_for_response()
|
||||
|
||||
labels = set(target['label'] for target in response.body['targets'])
|
||||
assert labels.issuperset(['get', 'items', 'keys', 'setdefault', 'update', 'values'])
|
||||
labels = set(target["label"] for target in response.body["targets"])
|
||||
assert labels.issuperset(
|
||||
["get", "items", "keys", "setdefault", "update", "values"]
|
||||
)
|
||||
|
||||
response = session.send_request('completions', arguments={
|
||||
'frameId': hit.frame_id,
|
||||
'text': 'x = b.setdefault',
|
||||
'column': 13,
|
||||
}).wait_for_response()
|
||||
response = session.send_request(
|
||||
"completions",
|
||||
arguments={
|
||||
"frameId": hit.frame_id,
|
||||
"text": "x = b.setdefault",
|
||||
"column": 13,
|
||||
},
|
||||
).wait_for_response()
|
||||
|
||||
assert response.body['targets'] == [
|
||||
{'label': 'setdefault', 'length': 6, 'start': 6, 'type': 'function'}]
|
||||
assert response.body["targets"] == [
|
||||
{"label": "setdefault", "length": 6, "start": 6, "type": "function"}
|
||||
]
|
||||
|
||||
response = session.send_request('completions', arguments={
|
||||
'frameId': hit.frame_id,
|
||||
'text': 'not_there',
|
||||
'column': 10,
|
||||
}).wait_for_response()
|
||||
response = session.send_request(
|
||||
"completions",
|
||||
arguments={"frameId": hit.frame_id, "text": "not_there", "column": 10},
|
||||
).wait_for_response()
|
||||
|
||||
assert not response.body['targets']
|
||||
assert not response.body["targets"]
|
||||
|
||||
# Check errors
|
||||
with pytest.raises(messaging.MessageHandlingError) as error:
|
||||
response = session.send_request('completions', arguments={
|
||||
'frameId': 9999999, # frameId not available.
|
||||
'text': 'not_there',
|
||||
'column': 10,
|
||||
}).wait_for_response()
|
||||
assert 'Wrong ID sent from the client:' in str(error)
|
||||
response = session.send_request(
|
||||
"completions",
|
||||
arguments={
|
||||
"frameId": 9999999, # frameId not available.
|
||||
"text": "not_there",
|
||||
"column": 10,
|
||||
},
|
||||
).wait_for_response()
|
||||
assert "Wrong ID sent from the client:" in str(error)
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
|
|
|||
|
|
@ -12,58 +12,59 @@ from tests.patterns import some
from tests.timeline import Event


@pytest.mark.parametrize('start_method', ['attach_socket_cmdline', 'attach_socket_import'])
@pytest.mark.parametrize(
"start_method", ["attach_socket_cmdline", "attach_socket_import"]
)
def test_continue_on_disconnect_for_attach(pyfile, start_method, run_as):
@pyfile
def code_to_debug():
from debug_me import backchannel
backchannel.write_json('continued')

bp_line = 4
backchannel.write_json("continued")  # @bp

with debug.Session() as session:
session.initialize(
target=(run_as, code_to_debug),
start_method=start_method,
ignore_unobserved=[Event('exited'), Event('terminated')],
use_backchannel=True,
)
session.set_breakpoints(code_to_debug, [bp_line])
target=(run_as, code_to_debug),
start_method=start_method,
ignore_unobserved=[Event("exited"), Event("terminated")],
use_backchannel=True,
)
session.set_breakpoints(code_to_debug, [code_to_debug.lines["bp"]])
session.start_debugging()
hit = session.wait_for_thread_stopped('breakpoint')
frames = hit.stacktrace.body['stackFrames']
assert frames[0]['line'] == bp_line
session.send_request('disconnect').wait_for_response()
hit = session.wait_for_thread_stopped("breakpoint")
frames = hit.stacktrace.body["stackFrames"]
assert frames[0]["line"] == code_to_debug.lines["bp"]
session.send_request("disconnect").wait_for_response()
session.wait_for_disconnect()
assert 'continued' == session.read_json()
assert "continued" == session.read_json()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('start_method', ['launch'])
|
||||
@pytest.mark.skip(reason='Bug #1052')
|
||||
@pytest.mark.parametrize("start_method", ["launch"])
|
||||
@pytest.mark.skip(reason="Bug #1052")
|
||||
def test_exit_on_disconnect_for_launch(pyfile, start_method, run_as):
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
import debug_me # noqa
|
||||
import os.path
|
||||
|
||||
fp = os.join(os.path.dirname(os.path.abspath(__file__)), 'here.txt')
|
||||
fp = os.join(os.path.dirname(os.path.abspath(__file__)), "here.txt") # @bp
|
||||
# should not execute this
|
||||
with open(fp, 'w') as f:
|
||||
print('Should not continue after disconnect on launch', file=f)
|
||||
with open(fp, "w") as f:
|
||||
print("Should not continue after disconnect on launch", file=f)
|
||||
|
||||
bp_line = 4
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
use_backchannel=True,
|
||||
expected_returncode=ANY.int,
|
||||
)
|
||||
session.set_breakpoints(code_to_debug, [bp_line])
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
use_backchannel=True,
|
||||
expected_returncode=some.int,
|
||||
)
|
||||
session.set_breakpoints(code_to_debug, code_to_debug.lines["bp"])
|
||||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped('breakpoint')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert frames[0]['line'] == bp_line
|
||||
session.send_request('disconnect').wait_for_response()
|
||||
hit = session.wait_for_thread_stopped("breakpoint")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert frames[0]["line"] == code_to_debug.lines["bp"]
|
||||
session.send_request("disconnect").wait_for_response()
|
||||
session.wait_for_exit()
|
||||
fp = os.join(os.path.dirname(os.path.abspath(code_to_debug)), 'here.txt')
|
||||
fp = os.join(os.path.dirname(os.path.abspath(code_to_debug)), "here.txt")
|
||||
assert not os.path.exists(fp)
|
||||
|
|
|
|||
|
|
@ -5,71 +5,80 @@
|
|||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import pytest
|
||||
|
||||
from tests import debug, net, test_data
|
||||
from tests import code, debug, net, test_data
|
||||
from tests.patterns import some
|
||||
from tests.timeline import Event
|
||||
from tests.net import find_http_url
|
||||
|
||||
|
||||
DJANGO1_ROOT = test_data / "django1"
|
||||
DJANGO1_MANAGE = DJANGO1_ROOT / 'app.py'
|
||||
DJANGO1_TEMPLATE = DJANGO1_ROOT / 'templates' / 'hello.html'
|
||||
DJANGO1_BAD_TEMPLATE = DJANGO1_ROOT / 'templates' / 'bad.html'
|
||||
DJANGO1_MANAGE = DJANGO1_ROOT / "app.py"
|
||||
DJANGO1_TEMPLATE = DJANGO1_ROOT / "templates" / "hello.html"
|
||||
DJANGO1_BAD_TEMPLATE = DJANGO1_ROOT / "templates" / "bad.html"
|
||||
DJANGO_PORT = net.get_test_server_port(8000, 8100)
|
||||
|
||||
django = net.WebServer(DJANGO_PORT)
|
||||
app_py_lines = code.get_marked_line_numbers(DJANGO1_MANAGE)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('bp_target', ['code', 'template'])
|
||||
@pytest.mark.parametrize('start_method', ['launch', 'attach_socket_cmdline'])
|
||||
@pytest.mark.parametrize("bp_target", ["code", "template"])
|
||||
@pytest.mark.parametrize("start_method", ["launch", "attach_socket_cmdline"])
|
||||
@pytest.mark.timeout(60)
|
||||
def test_django_breakpoint_no_multiproc(start_method, bp_target):
|
||||
bp_file, bp_line, bp_name = {
|
||||
'code': (DJANGO1_MANAGE, 40, 'home'),
|
||||
'template': (DJANGO1_TEMPLATE, 8, 'Django Template'),
|
||||
"code": (DJANGO1_MANAGE, app_py_lines["bphome"], "home"),
|
||||
"template": (DJANGO1_TEMPLATE, 8, "Django Template"),
|
||||
}[bp_target]
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
start_method=start_method,
|
||||
target=('file', DJANGO1_MANAGE),
|
||||
program_args=['runserver', '--noreload', '--', str(DJANGO_PORT)],
|
||||
debug_options=['Django'],
|
||||
target=("file", DJANGO1_MANAGE),
|
||||
program_args=["runserver", "--noreload", "--", str(DJANGO_PORT)],
|
||||
debug_options=["Django"],
|
||||
cwd=DJANGO1_ROOT,
|
||||
expected_returncode=ANY.int, # No clean way to kill Django server
|
||||
expected_returncode=some.int, # No clean way to kill Django server
|
||||
)
|
||||
|
||||
bp_var_content = 'Django-Django-Test'
|
||||
bp_var_content = "Django-Django-Test"
|
||||
session.set_breakpoints(bp_file, [bp_line])
|
||||
session.start_debugging()
|
||||
with django:
|
||||
home_request = django.get('home')
|
||||
stop = session.wait_for_stop('breakpoint', [{
|
||||
'id': ANY.dap_id,
|
||||
'name': bp_name,
|
||||
'source': {
|
||||
'sourceReference': ANY,
|
||||
'path': Path(bp_file),
|
||||
},
|
||||
'line': bp_line,
|
||||
'column': 1,
|
||||
}])
|
||||
home_request = django.get("home")
|
||||
stop = session.wait_for_stop(
|
||||
"breakpoint",
|
||||
[
|
||||
{
|
||||
"id": some.dap_id,
|
||||
"name": bp_name,
|
||||
"source": {
|
||||
"sourceReference": some.str,
|
||||
"path": some.path(bp_file),
|
||||
},
|
||||
"line": bp_line,
|
||||
"column": 1,
|
||||
}
|
||||
],
|
||||
)
|
||||
|
||||
scopes = session.request('scopes', arguments={'frameId': stop.frame_id})
|
||||
scopes = session.request("scopes", arguments={"frameId": stop.frame_id})
|
||||
assert len(scopes) > 0
|
||||
|
||||
variables = session.request('variables', arguments={
|
||||
'variablesReference': scopes[0]['variablesReference']
|
||||
})
|
||||
variables = [v for v in variables['variables'] if v['name'] == 'content']
|
||||
assert variables == [{
|
||||
'name': 'content',
|
||||
'type': 'str',
|
||||
'value': repr(bp_var_content),
|
||||
'presentationHint': {'attributes': ['rawString']},
|
||||
'evaluateName': 'content',
|
||||
'variablesReference': 0,
|
||||
}]
|
||||
variables = session.request(
|
||||
"variables",
|
||||
arguments={"variablesReference": scopes[0]["variablesReference"]},
|
||||
)
|
||||
variables = [v for v in variables["variables"] if v["name"] == "content"]
|
||||
assert variables == [
|
||||
{
|
||||
"name": "content",
|
||||
"type": "str",
|
||||
"value": repr(bp_var_content),
|
||||
"presentationHint": {"attributes": ["rawString"]},
|
||||
"evaluateName": "content",
|
||||
"variablesReference": 0,
|
||||
}
|
||||
]
|
||||
|
||||
session.send_continue()
|
||||
assert bp_var_content in home_request.response_text()
|
||||
|
|
@ -77,61 +86,76 @@ def test_django_breakpoint_no_multiproc(start_method, bp_target):
|
|||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('start_method', ['launch', 'attach_socket_cmdline'])
|
||||
@pytest.mark.parametrize("start_method", ["launch", "attach_socket_cmdline"])
|
||||
@pytest.mark.timeout(60)
|
||||
def test_django_template_exception_no_multiproc(start_method):
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
start_method=start_method,
|
||||
target=('file', DJANGO1_MANAGE),
|
||||
program_args=['runserver', '--noreload', '--nothreading', str(DJANGO_PORT)],
|
||||
debug_options=['Django'],
|
||||
target=("file", DJANGO1_MANAGE),
|
||||
program_args=["runserver", "--noreload", "--nothreading", str(DJANGO_PORT)],
|
||||
debug_options=["Django"],
|
||||
cwd=DJANGO1_ROOT,
|
||||
expected_returncode=ANY.int, # No clean way to kill Django server
|
||||
expected_returncode=some.int, # No clean way to kill Django server
|
||||
)
|
||||
|
||||
session.send_request('setExceptionBreakpoints', arguments={
|
||||
'filters': ['raised', 'uncaught'],
|
||||
}).wait_for_response()
|
||||
session.send_request(
|
||||
"setExceptionBreakpoints", arguments={"filters": ["raised", "uncaught"]}
|
||||
).wait_for_response()
|
||||
|
||||
session.start_debugging()
|
||||
with django:
|
||||
web_request = django.get('badtemplate')
|
||||
web_request = django.get("badtemplate")
|
||||
|
||||
hit = session.wait_for_thread_stopped(reason='exception')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert frames[0] == ANY.dict_with({
|
||||
'id': ANY.dap_id,
|
||||
'name': 'Django TemplateSyntaxError',
|
||||
'source': ANY.dict_with({
|
||||
'sourceReference': ANY.dap_id,
|
||||
'path': Path(DJANGO1_BAD_TEMPLATE),
|
||||
}),
|
||||
'line': 8,
|
||||
'column': 1,
|
||||
})
|
||||
hit = session.wait_for_thread_stopped(reason="exception")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert frames[0] == some.dict.containing(
|
||||
{
|
||||
"id": some.dap_id,
|
||||
"name": "Django TemplateSyntaxError",
|
||||
"source": some.dict.containing(
|
||||
{
|
||||
"sourceReference": some.dap_id,
|
||||
"path": some.path(DJANGO1_BAD_TEMPLATE),
|
||||
}
|
||||
),
|
||||
"line": 8,
|
||||
"column": 1,
|
||||
}
|
||||
)
|
||||
|
||||
# Will stop once in the plugin
|
||||
resp_exception_info = session.send_request(
|
||||
'exceptionInfo',
|
||||
arguments={'threadId': hit.thread_id, }
|
||||
"exceptionInfo", arguments={"threadId": hit.thread_id}
|
||||
).wait_for_response()
|
||||
exception = resp_exception_info.body
|
||||
assert exception == ANY.dict_with({
|
||||
'exceptionId': ANY.such_that(lambda s: s.endswith('TemplateSyntaxError')),
|
||||
'breakMode': 'always',
|
||||
'description': ANY.such_that(lambda s: s.find('doesnotexist') > -1),
|
||||
'details': ANY.dict_with({
|
||||
'message': ANY.such_that(lambda s: s.endswith('doesnotexist') > -1),
|
||||
'typeName': ANY.such_that(lambda s: s.endswith('TemplateSyntaxError')),
|
||||
})
|
||||
})
|
||||
assert exception == some.dict.containing(
|
||||
{
|
||||
"exceptionId": some.str.such_that(
|
||||
lambda s: s.endswith("TemplateSyntaxError")
|
||||
),
|
||||
"breakMode": "always",
|
||||
"description": some.str.such_that(
|
||||
lambda s: s.find("doesnotexist") > -1
|
||||
),
|
||||
"details": some.dict_with(
|
||||
{
|
||||
"message": some.str.such_that(
|
||||
lambda s: s.endswith("doesnotexist") > -1
|
||||
),
|
||||
"typeName": some.str.such_that(
|
||||
lambda s: s.endswith("TemplateSyntaxError")
|
||||
),
|
||||
}
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
# And a second time when the exception reaches the user code.
|
||||
hit = session.wait_for_thread_stopped(reason='exception')
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
hit = session.wait_for_thread_stopped(reason="exception")
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
# ignore response for exception tests
|
||||
web_request.wait_for_response()
|
||||
|
|
@ -139,174 +163,185 @@ def test_django_template_exception_no_multiproc(start_method):
|
|||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('ex_type', ['handled', 'unhandled'])
|
||||
@pytest.mark.parametrize('start_method', ['launch', 'attach_socket_cmdline'])
|
||||
@pytest.mark.parametrize("ex_type", ["handled", "unhandled"])
|
||||
@pytest.mark.parametrize("start_method", ["launch", "attach_socket_cmdline"])
|
||||
@pytest.mark.timeout(60)
|
||||
def test_django_exception_no_multiproc(ex_type, start_method):
|
||||
ex_line = {
|
||||
'handled': 50,
|
||||
'unhandled': 64,
|
||||
}[ex_type]
|
||||
ex_line = {"handled": 50, "unhandled": 64}[ex_type]
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
start_method=start_method,
|
||||
target=('file', DJANGO1_MANAGE),
|
||||
program_args=['runserver', '--noreload', '--nothreading', str(DJANGO_PORT)],
|
||||
debug_options=['Django'],
|
||||
target=("file", DJANGO1_MANAGE),
|
||||
program_args=["runserver", "--noreload", "--nothreading", str(DJANGO_PORT)],
|
||||
debug_options=["Django"],
|
||||
cwd=DJANGO1_ROOT,
|
||||
expected_returncode=ANY.int, # No clean way to kill Django server
|
||||
expected_returncode=some.int, # No clean way to kill Django server
|
||||
)
|
||||
|
||||
session.send_request('setExceptionBreakpoints', arguments={
|
||||
'filters': ['raised', 'uncaught'],
|
||||
}).wait_for_response()
|
||||
session.send_request(
|
||||
"setExceptionBreakpoints", arguments={"filters": ["raised", "uncaught"]}
|
||||
).wait_for_response()
|
||||
|
||||
session.start_debugging()
|
||||
with django:
|
||||
web_request = django.get(ex_type)
|
||||
|
||||
wait_for_connection(DJANGO_PORT)
|
||||
thread_stopped = session.wait_for_next(
|
||||
Event("stopped", some.dict.containing({"reason": "exception"}))
|
||||
)
|
||||
assert thread_stopped == Event(
|
||||
"stopped",
|
||||
some.dict.containing(
|
||||
{
|
||||
"reason": "exception",
|
||||
"text": some.str.such_that(
|
||||
lambda s: s.endswith("ArithmeticError")
|
||||
),
|
||||
"description": "Hello",
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
link = DJANGO_LINK + ex_type
|
||||
web_request = get_web_content(link, {})
|
||||
|
||||
thread_stopped = session.wait_for_next(Event('stopped', ANY.dict_with({'reason': 'exception'})))
|
||||
assert thread_stopped == Event('stopped', ANY.dict_with({
|
||||
'reason': 'exception',
|
||||
'text': ANY.such_that(lambda s: s.endswith('ArithmeticError')),
|
||||
'description': 'Hello'
|
||||
}))
|
||||
|
||||
tid = thread_stopped.body['threadId']
|
||||
resp_exception_info = session.send_request(
|
||||
'exceptionInfo',
|
||||
arguments={'threadId': tid, }
|
||||
).wait_for_response()
|
||||
exception = resp_exception_info.body
|
||||
assert exception == {
|
||||
'exceptionId': ANY.such_that(lambda s: s.endswith('ArithmeticError')),
|
||||
'breakMode': 'always',
|
||||
'description': 'Hello',
|
||||
'details': {
|
||||
'message': 'Hello',
|
||||
'typeName': ANY.such_that(lambda s: s.endswith('ArithmeticError')),
|
||||
'source': Path(DJANGO1_MANAGE),
|
||||
'stackTrace': ANY.such_that(lambda s: True),
|
||||
tid = thread_stopped.body["threadId"]
|
||||
resp_exception_info = session.send_request(
|
||||
"exceptionInfo", arguments={"threadId": tid}
|
||||
).wait_for_response()
|
||||
exception = resp_exception_info.body
|
||||
assert exception == {
|
||||
"exceptionId": some.str.such_that(
|
||||
lambda s: s.endswith("ArithmeticError")
|
||||
),
|
||||
"breakMode": "always",
|
||||
"description": "Hello",
|
||||
"details": {
|
||||
"message": "Hello",
|
||||
"typeName": some.str.such_that(
|
||||
lambda s: s.endswith("ArithmeticError")
|
||||
),
|
||||
"source": some.path(DJANGO1_MANAGE),
|
||||
"stackTrace": some.str.such_that(lambda s: True),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
resp_stacktrace = session.send_request('stackTrace', arguments={
|
||||
'threadId': tid,
|
||||
}).wait_for_response()
|
||||
assert resp_stacktrace.body['totalFrames'] > 1
|
||||
frames = resp_stacktrace.body['stackFrames']
|
||||
assert frames[0] == {
|
||||
'id': ANY.dap_id,
|
||||
'name': 'bad_route_' + ex_type,
|
||||
'source': {
|
||||
'sourceReference': ANY.dap_id,
|
||||
'path': Path(DJANGO1_MANAGE),
|
||||
},
|
||||
'line': ex_line,
|
||||
'column': 1,
|
||||
}
|
||||
resp_stacktrace = session.send_request(
|
||||
"stackTrace", arguments={"threadId": tid}
|
||||
).wait_for_response()
|
||||
assert resp_stacktrace.body["totalFrames"] > 1
|
||||
frames = resp_stacktrace.body["stackFrames"]
|
||||
assert frames[0] == {
|
||||
"id": some.dap_id,
|
||||
"name": "bad_route_" + ex_type,
|
||||
"source": {
|
||||
"sourceReference": some.dap_id,
|
||||
"path": some.path(DJANGO1_MANAGE),
|
||||
},
|
||||
"line": ex_line,
|
||||
"column": 1,
|
||||
}
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
# ignore response for exception tests
|
||||
web_request.wait_for_response()
|
||||
|
||||
# shutdown to web server
|
||||
link = DJANGO_LINK + 'exit'
|
||||
get_web_content(link).wait_for_response()
|
||||
# ignore response for exception tests
|
||||
web_request.wait_for_response()
|
||||
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.skip()
|
||||
@pytest.mark.timeout(120)
|
||||
@pytest.mark.parametrize('start_method', ['launch'])
|
||||
@pytest.mark.parametrize("start_method", ["launch"])
|
||||
def test_django_breakpoint_multiproc(start_method):
|
||||
with debug.Session() as parent_session:
|
||||
parent_session.initialize(
|
||||
start_method=start_method,
|
||||
target=('file', DJANGO1_MANAGE),
|
||||
target=("file", DJANGO1_MANAGE),
|
||||
multiprocess=True,
|
||||
program_args=['runserver'],
|
||||
debug_options=['Django'],
|
||||
program_args=["runserver"],
|
||||
debug_options=["Django"],
|
||||
cwd=DJANGO1_ROOT,
|
||||
ignore_unobserved=[Event('stopped')],
|
||||
expected_returncode=ANY.int, # No clean way to kill Django server
|
||||
ignore_unobserved=[Event("stopped")],
|
||||
expected_returncode=some.int, # No clean way to kill Django server
|
||||
)
|
||||
|
||||
bp_line = 40
|
||||
bp_var_content = 'Django-Django-Test'
|
||||
bp_line = app_py_lines["bphome"]
|
||||
bp_var_content = "Django-Django-Test"
|
||||
parent_session.set_breakpoints(DJANGO1_MANAGE, [bp_line])
|
||||
parent_session.start_debugging()
|
||||
|
||||
with parent_session.connect_to_next_child_session() as child_session:
|
||||
child_session.send_request('setBreakpoints', arguments={
|
||||
'source': {'path': DJANGO1_MANAGE},
|
||||
'breakpoints': [{'line': bp_line}, ],
|
||||
}).wait_for_response()
|
||||
child_session.send_request(
|
||||
"setBreakpoints",
|
||||
arguments={
|
||||
"source": {"path": DJANGO1_MANAGE},
|
||||
"breakpoints": [{"line": bp_line}],
|
||||
},
|
||||
).wait_for_response()
|
||||
child_session.start_debugging()
|
||||
|
||||
# wait for Django server to start
|
||||
while True:
|
||||
child_session.proceed()
|
||||
o = child_session.wait_for_next(Event('output'))
|
||||
if get_url_from_str(o.body['output']) is not None:
|
||||
o = child_session.wait_for_next(Event("output"))
|
||||
if find_http_url(o.body["output"]) is not None:
|
||||
break
|
||||
|
||||
web_request = get_web_content(DJANGO_LINK + 'home', {})
|
||||
with django:
|
||||
web_request = django.get("home")
|
||||
|
||||
thread_stopped = child_session.wait_for_next(Event('stopped', ANY.dict_with({'reason': 'breakpoint'})))
|
||||
assert thread_stopped.body['threadId'] is not None
|
||||
thread_stopped = child_session.wait_for_next(
|
||||
Event("stopped", some.dict.containing({"reason": "breakpoint"}))
|
||||
)
|
||||
assert thread_stopped.body["threadId"] is not None
|
||||
|
||||
tid = thread_stopped.body['threadId']
|
||||
tid = thread_stopped.body["threadId"]
|
||||
|
||||
resp_stacktrace = child_session.send_request('stackTrace', arguments={
|
||||
'threadId': tid,
|
||||
}).wait_for_response()
|
||||
assert resp_stacktrace.body['totalFrames'] > 0
|
||||
frames = resp_stacktrace.body['stackFrames']
|
||||
assert frames[0] == {
|
||||
'id': ANY.dap_id,
|
||||
'name': 'home',
|
||||
'source': {
|
||||
'sourceReference': ANY.dap_id,
|
||||
'path': Path(DJANGO1_MANAGE),
|
||||
},
|
||||
'line': bp_line,
|
||||
'column': 1,
|
||||
}
|
||||
resp_stacktrace = child_session.send_request(
|
||||
"stackTrace", arguments={"threadId": tid}
|
||||
).wait_for_response()
|
||||
assert resp_stacktrace.body["totalFrames"] > 0
|
||||
frames = resp_stacktrace.body["stackFrames"]
|
||||
assert frames[0] == {
|
||||
"id": some.dap_id,
|
||||
"name": "home",
|
||||
"source": {
|
||||
"sourceReference": some.dap_id,
|
||||
"path": some.path(DJANGO1_MANAGE),
|
||||
},
|
||||
"line": bp_line,
|
||||
"column": 1,
|
||||
}
|
||||
|
||||
fid = frames[0]['id']
|
||||
resp_scopes = child_session.send_request('scopes', arguments={
|
||||
'frameId': fid
|
||||
}).wait_for_response()
|
||||
scopes = resp_scopes.body['scopes']
|
||||
assert len(scopes) > 0
|
||||
fid = frames[0]["id"]
|
||||
resp_scopes = child_session.send_request(
|
||||
"scopes", arguments={"frameId": fid}
|
||||
).wait_for_response()
|
||||
scopes = resp_scopes.body["scopes"]
|
||||
assert len(scopes) > 0
|
||||
|
||||
resp_variables = child_session.send_request('variables', arguments={
|
||||
'variablesReference': scopes[0]['variablesReference']
|
||||
}).wait_for_response()
|
||||
variables = list(v for v in resp_variables.body['variables'] if v['name'] == 'content')
|
||||
assert variables == [{
|
||||
'name': 'content',
|
||||
'type': 'str',
|
||||
'value': repr(bp_var_content),
|
||||
'presentationHint': {'attributes': ['rawString']},
|
||||
'evaluateName': 'content'
|
||||
}]
|
||||
resp_variables = child_session.send_request(
|
||||
"variables",
|
||||
arguments={"variablesReference": scopes[0]["variablesReference"]},
|
||||
).wait_for_response()
|
||||
variables = list(
|
||||
v
|
||||
for v in resp_variables.body["variables"]
|
||||
if v["name"] == "content"
|
||||
)
|
||||
assert variables == [
|
||||
{
|
||||
"name": "content",
|
||||
"type": "str",
|
||||
"value": repr(bp_var_content),
|
||||
"presentationHint": {"attributes": ["rawString"]},
|
||||
"evaluateName": "content",
|
||||
}
|
||||
]
|
||||
|
||||
child_session.send_request('continue').wait_for_response(freeze=False)
|
||||
child_session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
web_content = web_request.wait_for_response()
|
||||
assert web_content.find(bp_var_content) != -1
|
||||
|
||||
# shutdown to web server
|
||||
link = DJANGO_LINK + 'exit'
|
||||
get_web_content(link).wait_for_response()
|
||||
web_content = web_request.wait_for_response()
|
||||
assert web_content.find(bp_var_content) != -1
|
||||
|
||||
child_session.wait_for_termination()
|
||||
parent_session.wait_for_exit()
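Note: the multiprocess test above keeps reading output events until find_http_url(...) spots the child server's startup URL, so web requests are only issued once the Django server is actually listening. A minimal sketch of such a URL extractor (hedged: the real tests/net.py helper may differ):

import re

_HTTP_URL = re.compile(r"https?://[^\s'\"]+")

def find_http_url(text):
    # Returns the first http(s) URL found in text, or None if there is none.
    match = _HTTP_URL.search(text)
    return match.group(0) if match else None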
@ -14,95 +14,92 @@ def test_variables_and_evaluate(pyfile, start_method, run_as):
|
|||
@pyfile
|
||||
def code_to_debug():
|
||||
import debug_me # noqa
|
||||
|
||||
a = 1
|
||||
b = {"one": 1, "two": 2}
|
||||
c = 3
|
||||
print([a, b, c])
|
||||
|
||||
bp_line = 6
|
||||
bp_file = code_to_debug
|
||||
print([a, b, c]) # @bp
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, bp_file),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.set_breakpoints(bp_file, [bp_line])
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
session.set_breakpoints(code_to_debug, [code_to_debug.lines["bp"]])
|
||||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped()
|
||||
|
||||
resp_scopes = session.send_request('scopes', arguments={
|
||||
'frameId': hit.frame_id,
|
||||
}).wait_for_response()
|
||||
scopes = resp_scopes.body['scopes']
|
||||
resp_scopes = session.send_request(
|
||||
"scopes", arguments={"frameId": hit.frame_id}
|
||||
).wait_for_response()
|
||||
scopes = resp_scopes.body["scopes"]
|
||||
assert len(scopes) > 0
|
||||
|
||||
resp_variables = session.send_request('variables', arguments={
|
||||
'variablesReference': scopes[0]['variablesReference']
|
||||
}).wait_for_response()
|
||||
variables = list(v for v in resp_variables.body['variables'] if v['name'] in ['a', 'b', 'c'])
|
||||
resp_variables = session.send_request(
|
||||
"variables",
|
||||
arguments={"variablesReference": scopes[0]["variablesReference"]},
|
||||
).wait_for_response()
|
||||
variables = list(
|
||||
v for v in resp_variables.body["variables"] if v["name"] in ["a", "b", "c"]
|
||||
)
|
||||
assert len(variables) == 3
|
||||
|
||||
# variables should be sorted alphabetically
|
||||
assert ['a', 'b', 'c'] == list(v['name'] for v in variables)
|
||||
assert ["a", "b", "c"] == list(v["name"] for v in variables)
|
||||
|
||||
# get contents of 'b'
|
||||
resp_b_variables = session.send_request('variables', arguments={
|
||||
'variablesReference': variables[1]['variablesReference']
|
||||
}).wait_for_response()
|
||||
b_variables = resp_b_variables.body['variables']
|
||||
resp_b_variables = session.send_request(
|
||||
"variables",
|
||||
arguments={"variablesReference": variables[1]["variablesReference"]},
|
||||
).wait_for_response()
|
||||
b_variables = resp_b_variables.body["variables"]
|
||||
assert len(b_variables) == 3
|
||||
assert b_variables[0] == {
|
||||
'type': 'int',
|
||||
'value': '1',
|
||||
'name': ANY.such_that(lambda x: x.find('one') > 0),
|
||||
'evaluateName': "b['one']",
|
||||
'variablesReference': 0,
|
||||
"type": "int",
|
||||
"value": "1",
|
||||
"name": some.str.such_that(lambda x: x.find("one") > 0),
|
||||
"evaluateName": "b['one']",
|
||||
"variablesReference": 0,
|
||||
}
|
||||
assert b_variables[1] == {
|
||||
'type': 'int',
|
||||
'value': '2',
|
||||
'name': ANY.such_that(lambda x: x.find('two') > 0),
|
||||
'evaluateName': "b['two']",
|
||||
'variablesReference': 0,
|
||||
"type": "int",
|
||||
"value": "2",
|
||||
"name": some.str.such_that(lambda x: x.find("two") > 0),
|
||||
"evaluateName": "b['two']",
|
||||
"variablesReference": 0,
|
||||
}
|
||||
assert b_variables[2] == {
|
||||
'type': 'int',
|
||||
'value': '2',
|
||||
'name': '__len__',
|
||||
'evaluateName': "len(b)",
|
||||
'variablesReference': 0,
|
||||
'presentationHint': {'attributes': ['readOnly']},
|
||||
"type": "int",
|
||||
"value": "2",
|
||||
"name": "__len__",
|
||||
"evaluateName": "len(b)",
|
||||
"variablesReference": 0,
|
||||
"presentationHint": {"attributes": ["readOnly"]},
|
||||
}
|
||||
|
||||
# simple variable
|
||||
resp_evaluate1 = session.send_request('evaluate', arguments={
|
||||
'expression': 'a', 'frameId': hit.frame_id,
|
||||
}).wait_for_response()
|
||||
assert resp_evaluate1.body == ANY.dict_with({
|
||||
'type': 'int',
|
||||
'result': '1'
|
||||
})
|
||||
resp_evaluate1 = session.send_request(
|
||||
"evaluate", arguments={"expression": "a", "frameId": hit.frame_id}
|
||||
).wait_for_response()
|
||||
assert resp_evaluate1.body == some.dict.containing(
|
||||
{"type": "int", "result": "1"}
|
||||
)
|
||||
|
||||
# dict variable
|
||||
resp_evaluate2 = session.send_request('evaluate', arguments={
|
||||
'expression': 'b["one"]', 'frameId': hit.frame_id,
|
||||
}).wait_for_response()
|
||||
assert resp_evaluate2.body == ANY.dict_with({
|
||||
'type': 'int',
|
||||
'result': '1'
|
||||
})
|
||||
resp_evaluate2 = session.send_request(
|
||||
"evaluate", arguments={"expression": 'b["one"]', "frameId": hit.frame_id}
|
||||
).wait_for_response()
|
||||
assert resp_evaluate2.body == some.dict.containing(
|
||||
{"type": "int", "result": "1"}
|
||||
)
|
||||
|
||||
# expression evaluate
|
||||
resp_evaluate3 = session.send_request('evaluate', arguments={
|
||||
'expression': 'a + b["one"]', 'frameId': hit.frame_id,
|
||||
}).wait_for_response()
|
||||
assert resp_evaluate3.body == ANY.dict_with({
|
||||
'type': 'int',
|
||||
'result': '2'
|
||||
})
|
||||
resp_evaluate3 = session.send_request(
|
||||
"evaluate",
|
||||
arguments={"expression": 'a + b["one"]', "frameId": hit.frame_id},
|
||||
).wait_for_response()
|
||||
assert resp_evaluate3.body == some.dict.containing(
|
||||
{"type": "int", "result": "2"}
|
||||
)
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
|
|
@ -110,6 +107,7 @@ def test_set_variable(pyfile, start_method, run_as):
|
|||
@pyfile
|
||||
def code_to_debug():
|
||||
from debug_me import backchannel, ptvsd
|
||||
|
||||
a = 1
|
||||
ptvsd.break_into_debugger()
|
||||
backchannel.write_json(a)
|
||||
|
|
@ -123,36 +121,41 @@ def test_set_variable(pyfile, start_method, run_as):
|
|||
session.start_debugging()
hit = session.wait_for_thread_stopped()

resp_scopes = session.send_request('scopes', arguments={
'frameId': hit.frame_id
}).wait_for_response()
scopes = resp_scopes.body['scopes']
resp_scopes = session.send_request(
"scopes", arguments={"frameId": hit.frame_id}
).wait_for_response()
scopes = resp_scopes.body["scopes"]
assert len(scopes) > 0

resp_variables = session.send_request('variables', arguments={
'variablesReference': scopes[0]['variablesReference']
}).wait_for_response()
variables = list(v for v in resp_variables.body['variables'] if v['name'] == 'a')
resp_variables = session.send_request(
"variables",
arguments={"variablesReference": scopes[0]["variablesReference"]},
).wait_for_response()
variables = list(
v for v in resp_variables.body["variables"] if v["name"] == "a"
)
assert len(variables) == 1
assert variables[0] == {
'type': 'int',
'value': '1',
'name': 'a',
'evaluateName': "a",
'variablesReference': 0,
"type": "int",
"value": "1",
"name": "a",
"evaluateName": "a",
"variablesReference": 0,
}

resp_set_variable = session.send_request('setVariable', arguments={
'variablesReference': scopes[0]['variablesReference'],
'name': 'a',
'value': '1000'
}).wait_for_response()
assert resp_set_variable.body == ANY.dict_with({
'type': 'int',
'value': '1000'
})
resp_set_variable = session.send_request(
"setVariable",
arguments={
"variablesReference": scopes[0]["variablesReference"],
"name": "a",
"value": "1000",
},
).wait_for_response()
assert resp_set_variable.body == some.dict.containing(
{"type": "int", "value": "1000"}
)

session.send_request('continue').wait_for_response(freeze=False)
session.send_request("continue").wait_for_response(freeze=False)

assert session.read_json() == 1000

@ -160,10 +163,10 @@ def test_set_variable(pyfile, start_method, run_as):
def test_variable_sort(pyfile, start_method, run_as):

@pyfile
def code_to_debug():
import debug_me # noqa

b_test = {"spam": "A", "eggs": "B", "abcd": "C"} # noqa
_b_test = 12 # noqa
__b_test = 13 # noqa

@ -177,147 +180,163 @@ def test_variable_sort(pyfile, start_method, run_as):
__c_test = 23 # noqa
|
||||
__c_test__ = 24 # noqa
|
||||
d = 3 # noqa
|
||||
print('done')
|
||||
|
||||
bp_line = 15
|
||||
bp_file = code_to_debug
|
||||
print("done") # @bp
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, bp_file),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.set_breakpoints(bp_file, [bp_line])
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
session.set_breakpoints(code_to_debug, [code_to_debug.lines["bp"]])
|
||||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped()
|
||||
|
||||
resp_scopes = session.send_request('scopes', arguments={
|
||||
'frameId': hit.frame_id
|
||||
}).wait_for_response()
|
||||
scopes = resp_scopes.body['scopes']
|
||||
resp_scopes = session.send_request(
|
||||
"scopes", arguments={"frameId": hit.frame_id}
|
||||
).wait_for_response()
|
||||
scopes = resp_scopes.body["scopes"]
|
||||
assert len(scopes) > 0
|
||||
|
||||
resp_variables = session.send_request('variables', arguments={
|
||||
'variablesReference': scopes[0]['variablesReference']
|
||||
}).wait_for_response()
|
||||
resp_variables = session.send_request(
|
||||
"variables",
|
||||
arguments={"variablesReference": scopes[0]["variablesReference"]},
|
||||
).wait_for_response()
|
||||
variable_names = list(
|
||||
v['name']
|
||||
for v in resp_variables.body['variables']
|
||||
if v['name'].find('_test') > 0
|
||||
v["name"]
|
||||
for v in resp_variables.body["variables"]
|
||||
if v["name"].find("_test") > 0
|
||||
)
|
||||
assert variable_names == [
|
||||
'a_test', 'b_test', 'c_test', '_a_test', '_b_test', '_c_test',
|
||||
'__a_test', '__b_test', '__c_test', '__a_test__', '__b_test__',
|
||||
'__c_test__'
|
||||
]
|
||||
"a_test",
|
||||
"b_test",
|
||||
"c_test",
|
||||
"_a_test",
|
||||
"_b_test",
|
||||
"_c_test",
|
||||
"__a_test",
|
||||
"__b_test",
|
||||
"__c_test",
|
||||
"__a_test__",
|
||||
"__b_test__",
|
||||
"__c_test__",
|
||||
]
|
||||
|
||||
# ensure string dict keys are sorted
|
||||
b_test_variable = list(v for v in resp_variables.body['variables'] if v['name'] == 'b_test')
|
||||
b_test_variable = list(
|
||||
v for v in resp_variables.body["variables"] if v["name"] == "b_test"
|
||||
)
|
||||
assert len(b_test_variable) == 1
|
||||
resp_dict_variables = session.send_request('variables', arguments={
|
||||
'variablesReference': b_test_variable[0]['variablesReference']
|
||||
}).wait_for_response()
|
||||
variable_names = list(v['name'][1:5] for v in resp_dict_variables.body['variables'])
|
||||
resp_dict_variables = session.send_request(
|
||||
"variables",
|
||||
arguments={"variablesReference": b_test_variable[0]["variablesReference"]},
|
||||
).wait_for_response()
|
||||
variable_names = list(
|
||||
v["name"][1:5] for v in resp_dict_variables.body["variables"]
|
||||
)
|
||||
assert len(variable_names) == 4
|
||||
assert variable_names[:3] == ['abcd', 'eggs', 'spam']
|
||||
assert variable_names[:3] == ["abcd", "eggs", "spam"]
|
||||
|
||||
# ensure numeric dict keys are sorted
|
||||
c_test_variable = list(v for v in resp_variables.body['variables'] if v['name'] == 'c_test')
|
||||
c_test_variable = list(
|
||||
v for v in resp_variables.body["variables"] if v["name"] == "c_test"
|
||||
)
|
||||
assert len(c_test_variable) == 1
|
||||
resp_dict_variables2 = session.send_request('variables', arguments={
|
||||
'variablesReference': c_test_variable[0]['variablesReference']
|
||||
}).wait_for_response()
|
||||
variable_names = list(v['name'] for v in resp_dict_variables2.body['variables'])
|
||||
resp_dict_variables2 = session.send_request(
|
||||
"variables",
|
||||
arguments={"variablesReference": c_test_variable[0]["variablesReference"]},
|
||||
).wait_for_response()
|
||||
variable_names = list(v["name"] for v in resp_dict_variables2.body["variables"])
|
||||
assert len(variable_names) == 4
|
||||
# NOTE: this is commented out due to sorting bug #213
|
||||
# assert variable_names[:3] == ['1', '2', '10']
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
def test_return_values(pyfile, start_method, run_as):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
import debug_me # noqa
|
||||
|
||||
class MyClass(object):
|
||||
def do_something(self):
|
||||
return 'did something'
|
||||
return "did something"
|
||||
|
||||
def my_func():
|
||||
return 'did more things'
|
||||
return "did more things"
|
||||
|
||||
MyClass().do_something() # @bp
|
||||
my_func()
|
||||
print('done')
|
||||
print("done")
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
print(line_numbers)
|
||||
expected1 = some.dict.containing(
|
||||
{
|
||||
"name": "(return) MyClass.do_something",
|
||||
"value": "'did something'",
|
||||
"type": "str",
|
||||
"presentationHint": some.dict.containing(
|
||||
{"attributes": some.str.such_that(lambda x: "readOnly" in x)}
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
expected1 = ANY.dict_with({
|
||||
'name': '(return) MyClass.do_something',
|
||||
'value': "'did something'",
|
||||
'type': 'str',
|
||||
'presentationHint': ANY.dict_with({
|
||||
'attributes': ANY.such_that(lambda x: 'readOnly' in x)
|
||||
}),
|
||||
})
|
||||
|
||||
expected2 = ANY.dict_with({
|
||||
'name': '(return) my_func',
|
||||
'value': "'did more things'",
|
||||
'type': 'str',
|
||||
'presentationHint': ANY.dict_with({
|
||||
'attributes': ANY.such_that(lambda x: 'readOnly' in x)
|
||||
}),
|
||||
})
|
||||
expected2 = some.dict.containing(
|
||||
{
|
||||
"name": "(return) my_func",
|
||||
"value": "'did more things'",
|
||||
"type": "str",
|
||||
"presentationHint": some.dict.containing(
|
||||
{"attributes": some.str.such_that(lambda x: "readOnly" in x)}
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
debug_options=['ShowReturnValue'],
|
||||
debug_options=["ShowReturnValue"],
|
||||
)
|
||||
session.set_breakpoints(code_to_debug, [line_numbers['bp']])
|
||||
session.set_breakpoints(code_to_debug, [code_to_debug.lines["bp"]])
|
||||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped()
|
||||
|
||||
session.send_request('next', {'threadId': hit.thread_id}).wait_for_response()
|
||||
hit = session.wait_for_thread_stopped(reason='step')
|
||||
session.send_request("next", {"threadId": hit.thread_id}).wait_for_response()
|
||||
hit = session.wait_for_thread_stopped(reason="step")
|
||||
|
||||
resp_scopes = session.send_request('scopes', arguments={
|
||||
'frameId': hit.frame_id
|
||||
}).wait_for_response()
|
||||
scopes = resp_scopes.body['scopes']
|
||||
resp_scopes = session.send_request(
|
||||
"scopes", arguments={"frameId": hit.frame_id}
|
||||
).wait_for_response()
|
||||
scopes = resp_scopes.body["scopes"]
|
||||
assert len(scopes) > 0
|
||||
|
||||
resp_variables = session.send_request('variables', arguments={
|
||||
'variablesReference': scopes[0]['variablesReference']
|
||||
}).wait_for_response()
|
||||
resp_variables = session.send_request(
|
||||
"variables",
|
||||
arguments={"variablesReference": scopes[0]["variablesReference"]},
|
||||
).wait_for_response()
|
||||
variables = list(
|
||||
v for v in resp_variables.body['variables']
|
||||
if v['name'].startswith('(return)')
|
||||
v
|
||||
for v in resp_variables.body["variables"]
|
||||
if v["name"].startswith("(return)")
|
||||
)
|
||||
|
||||
assert variables == [expected1]
|
||||
|
||||
session.send_request('next', {'threadId': hit.thread_id}).wait_for_response()
|
||||
hit = session.wait_for_thread_stopped(reason='step')
|
||||
session.send_request("next", {"threadId": hit.thread_id}).wait_for_response()
|
||||
hit = session.wait_for_thread_stopped(reason="step")
|
||||
|
||||
# Scope should not have changed so use the same scope
|
||||
resp_variables = session.send_request('variables', arguments={
|
||||
'variablesReference': scopes[0]['variablesReference']
|
||||
}).wait_for_response()
|
||||
resp_variables = session.send_request(
|
||||
"variables",
|
||||
arguments={"variablesReference": scopes[0]["variablesReference"]},
|
||||
).wait_for_response()
|
||||
variables = list(
|
||||
v for v in resp_variables.body['variables']
|
||||
if v['name'].startswith('(return)')
|
||||
v
|
||||
for v in resp_variables.body["variables"]
|
||||
if v["name"].startswith("(return)")
|
||||
)
|
||||
|
||||
assert variables == [expected1, expected2]
|
||||
|
||||
session.send_request('continue').wait_for_response()
|
||||
session.send_request("continue").wait_for_response()
|
||||
session.wait_for_exit()
@ -328,35 +347,30 @@ def test_unicode(pyfile, start_method, run_as):
@pyfile
|
||||
def code_to_debug():
|
||||
from debug_me import ptvsd
|
||||
|
||||
# Since Unicode variable name is a SyntaxError at parse time in Python 2,
|
||||
# this needs to do a roundabout way of setting it to avoid parse issues.
|
||||
globals()[u'\u16A0'] = 123
|
||||
globals()["\u16A0"] = 123
|
||||
ptvsd.break_into_debugger()
|
||||
print('break')
|
||||
print("break")
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped()
|
||||
|
||||
resp_eval = session.send_request('evaluate', arguments={
|
||||
'expression': '\u16A0', 'frameId': hit.frame_id,
|
||||
}).wait_for_response()
|
||||
resp_eval = session.send_request(
|
||||
"evaluate", arguments={"expression": "\u16A0", "frameId": hit.frame_id}
|
||||
).wait_for_response()
|
||||
|
||||
if sys.version_info >= (3,):
|
||||
assert resp_eval.body == ANY.dict_with({
|
||||
'type': 'int',
|
||||
'result': '123'
|
||||
})
|
||||
assert resp_eval.body == some.dict.containing(
|
||||
{"type": "int", "result": "123"}
|
||||
)
|
||||
else:
|
||||
assert resp_eval.body == ANY.dict_with({
|
||||
'type': 'SyntaxError'
|
||||
})
|
||||
assert resp_eval.body == some.dict.containing({"type": "SyntaxError"})
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
@ -364,113 +378,222 @@ def test_hex_numbers(pyfile, start_method, run_as):
@pyfile
|
||||
def code_to_debug():
|
||||
import debug_me # noqa
|
||||
|
||||
a = 100
|
||||
b = [1, 10, 100]
|
||||
c = {10: 10, 100: 100, 1000: 1000}
|
||||
d = {(1, 10, 100): (10000, 100000, 100000)}
|
||||
print((a, b, c, d)) # @bp
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
print(line_numbers)
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.set_breakpoints(code_to_debug, [line_numbers['bp']])
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
session.set_breakpoints(code_to_debug, [code_to_debug.lines["bp"]])
|
||||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped()
|
||||
|
||||
resp_scopes = session.send_request('scopes', arguments={
|
||||
'frameId': hit.frame_id
|
||||
}).wait_for_response()
|
||||
scopes = resp_scopes.body['scopes']
|
||||
resp_scopes = session.send_request(
|
||||
"scopes", arguments={"frameId": hit.frame_id}
|
||||
).wait_for_response()
|
||||
scopes = resp_scopes.body["scopes"]
|
||||
assert len(scopes) > 0
|
||||
|
||||
resp_variables = session.send_request('variables', arguments={
|
||||
'variablesReference': scopes[0]['variablesReference'],
|
||||
'format': {'hex': True}
|
||||
}).wait_for_response()
|
||||
variables = list(v for v in resp_variables.body['variables']
|
||||
if v['name'] in ('a', 'b', 'c', 'd'))
|
||||
a, b, c, d = sorted(variables, key=lambda v: v['name'])
|
||||
assert a == ANY.dict_with({
|
||||
'name': 'a',
|
||||
'value': "0x64",
|
||||
'type': 'int',
|
||||
'evaluateName': 'a',
|
||||
'variablesReference': 0,
|
||||
})
|
||||
resp_variables = session.send_request(
|
||||
"variables",
|
||||
arguments={
|
||||
"variablesReference": scopes[0]["variablesReference"],
|
||||
"format": {"hex": True},
|
||||
},
|
||||
).wait_for_response()
|
||||
variables = list(
|
||||
v
|
||||
for v in resp_variables.body["variables"]
|
||||
if v["name"] in ("a", "b", "c", "d")
|
||||
)
|
||||
a, b, c, d = sorted(variables, key=lambda v: v["name"])
|
||||
assert a == some.dict.containing(
|
||||
{
|
||||
"name": "a",
|
||||
"value": "0x64",
|
||||
"type": "int",
|
||||
"evaluateName": "a",
|
||||
"variablesReference": 0,
|
||||
}
|
||||
)
|
||||
|
||||
assert b == ANY.dict_with({
|
||||
'name': 'b',
|
||||
'value': "[0x1, 0xa, 0x64]",
|
||||
'type': 'list',
|
||||
'evaluateName': 'b',
|
||||
'variablesReference': ANY.dap_id,
|
||||
})
|
||||
assert b == some.dict.containing(
|
||||
{
|
||||
"name": "b",
|
||||
"value": "[0x1, 0xa, 0x64]",
|
||||
"type": "list",
|
||||
"evaluateName": "b",
|
||||
"variablesReference": some.dap_id,
|
||||
}
|
||||
)
|
||||
|
||||
resp_variables = session.send_request('variables', arguments={
|
||||
'variablesReference': b['variablesReference'],
|
||||
'format': {'hex': True}
|
||||
}).wait_for_response()
|
||||
b_children = resp_variables.body['variables']
|
||||
resp_variables = session.send_request(
|
||||
"variables",
|
||||
arguments={
|
||||
"variablesReference": b["variablesReference"],
|
||||
"format": {"hex": True},
|
||||
},
|
||||
).wait_for_response()
|
||||
b_children = resp_variables.body["variables"]
|
||||
assert b_children == [
|
||||
{'name': '0x0', 'value': '0x1', 'type': 'int', 'evaluateName': 'b[0]', 'variablesReference': 0, },
|
||||
{'name': '0x1', 'value': '0xa', 'type': 'int', 'evaluateName': 'b[1]', 'variablesReference': 0, },
|
||||
{'name': '0x2', 'value': '0x64', 'type': 'int', 'evaluateName': 'b[2]', 'variablesReference': 0, },
|
||||
{'name': '__len__', 'value': '0x3', 'type': 'int', 'evaluateName': 'len(b)', 'variablesReference': 0, 'presentationHint': {'attributes': ['readOnly']}, },
|
||||
{
|
||||
"name": "0x0",
|
||||
"value": "0x1",
|
||||
"type": "int",
|
||||
"evaluateName": "b[0]",
|
||||
"variablesReference": 0,
|
||||
},
|
||||
{
|
||||
"name": "0x1",
|
||||
"value": "0xa",
|
||||
"type": "int",
|
||||
"evaluateName": "b[1]",
|
||||
"variablesReference": 0,
|
||||
},
|
||||
{
|
||||
"name": "0x2",
|
||||
"value": "0x64",
|
||||
"type": "int",
|
||||
"evaluateName": "b[2]",
|
||||
"variablesReference": 0,
|
||||
},
|
||||
{
|
||||
"name": "__len__",
|
||||
"value": "0x3",
|
||||
"type": "int",
|
||||
"evaluateName": "len(b)",
|
||||
"variablesReference": 0,
|
||||
"presentationHint": {"attributes": ["readOnly"]},
|
||||
},
|
||||
]
|
||||
|
||||
assert c == ANY.dict_with({
|
||||
'name': 'c',
|
||||
'value': '{0xa: 0xa, 0x64: 0x64, 0x3e8: 0x3e8}',
|
||||
'type': 'dict',
|
||||
'evaluateName': 'c',
|
||||
'variablesReference': ANY.dap_id,
|
||||
})
|
||||
assert c == some.dict.containing(
|
||||
{
|
||||
"name": "c",
|
||||
"value": "{0xa: 0xa, 0x64: 0x64, 0x3e8: 0x3e8}",
|
||||
"type": "dict",
|
||||
"evaluateName": "c",
|
||||
"variablesReference": some.dap_id,
|
||||
}
|
||||
)
|
||||
|
||||
resp_variables = session.send_request('variables', arguments={
|
||||
'variablesReference': c['variablesReference'],
|
||||
'format': {'hex': True}
|
||||
}).wait_for_response()
|
||||
c_children = resp_variables.body['variables']
|
||||
resp_variables = session.send_request(
|
||||
"variables",
|
||||
arguments={
|
||||
"variablesReference": c["variablesReference"],
|
||||
"format": {"hex": True},
|
||||
},
|
||||
).wait_for_response()
|
||||
c_children = resp_variables.body["variables"]
|
||||
assert c_children == [
|
||||
{'name': '0x3e8', 'value': '0x3e8', 'type': 'int', 'evaluateName': 'c[1000]', 'variablesReference': 0, },
|
||||
{'name': '0x64', 'value': '0x64', 'type': 'int', 'evaluateName': 'c[100]', 'variablesReference': 0, },
|
||||
{'name': '0xa', 'value': '0xa', 'type': 'int', 'evaluateName': 'c[10]', 'variablesReference': 0, },
|
||||
{'name': '__len__', 'value': '0x3', 'type': 'int', 'evaluateName': 'len(c)', 'variablesReference': 0, 'presentationHint': {'attributes': ['readOnly']}, }
|
||||
{
|
||||
"name": "0x3e8",
|
||||
"value": "0x3e8",
|
||||
"type": "int",
|
||||
"evaluateName": "c[1000]",
|
||||
"variablesReference": 0,
|
||||
},
|
||||
{
|
||||
"name": "0x64",
|
||||
"value": "0x64",
|
||||
"type": "int",
|
||||
"evaluateName": "c[100]",
|
||||
"variablesReference": 0,
|
||||
},
|
||||
{
|
||||
"name": "0xa",
|
||||
"value": "0xa",
|
||||
"type": "int",
|
||||
"evaluateName": "c[10]",
|
||||
"variablesReference": 0,
|
||||
},
|
||||
{
|
||||
"name": "__len__",
|
||||
"value": "0x3",
|
||||
"type": "int",
|
||||
"evaluateName": "len(c)",
|
||||
"variablesReference": 0,
|
||||
"presentationHint": {"attributes": ["readOnly"]},
|
||||
},
|
||||
]
|
||||
|
||||
assert d == ANY.dict_with({
|
||||
'name': 'd',
|
||||
'value': '{(0x1, 0xa, 0x64): (0x2710, 0x186a0, 0x186a0)}',
|
||||
'type': 'dict',
|
||||
'evaluateName': 'd',
|
||||
'variablesReference': ANY.dap_id,
|
||||
})
|
||||
resp_variables = session.send_request('variables', arguments={
|
||||
'variablesReference': d['variablesReference'],
|
||||
'format': {'hex': True}
|
||||
}).wait_for_response()
|
||||
d_children = resp_variables.body['variables']
|
||||
assert d == some.dict.containing(
|
||||
{
|
||||
"name": "d",
|
||||
"value": "{(0x1, 0xa, 0x64): (0x2710, 0x186a0, 0x186a0)}",
|
||||
"type": "dict",
|
||||
"evaluateName": "d",
|
||||
"variablesReference": some.dap_id,
|
||||
}
|
||||
)
|
||||
resp_variables = session.send_request(
|
||||
"variables",
|
||||
arguments={
|
||||
"variablesReference": d["variablesReference"],
|
||||
"format": {"hex": True},
|
||||
},
|
||||
).wait_for_response()
|
||||
d_children = resp_variables.body["variables"]
|
||||
assert d_children == [
|
||||
{'name': '(0x1, 0xa, 0x64)', 'value': '(0x2710, 0x186a0, 0x186a0)', 'type': 'tuple', 'evaluateName': 'd[(1, 10, 100)]', 'variablesReference': ANY.dap_id},
|
||||
{'name': '__len__', 'value': '0x1', 'type': 'int', 'evaluateName': 'len(d)', 'variablesReference': 0, 'presentationHint': {'attributes': ['readOnly']}, }
|
||||
{
|
||||
"name": "(0x1, 0xa, 0x64)",
|
||||
"value": "(0x2710, 0x186a0, 0x186a0)",
|
||||
"type": "tuple",
|
||||
"evaluateName": "d[(1, 10, 100)]",
|
||||
"variablesReference": some.dap_id,
|
||||
},
|
||||
{
|
||||
"name": "__len__",
|
||||
"value": "0x1",
|
||||
"type": "int",
|
||||
"evaluateName": "len(d)",
|
||||
"variablesReference": 0,
|
||||
"presentationHint": {"attributes": ["readOnly"]},
|
||||
},
|
||||
]
|
||||
|
||||
resp_variables = session.send_request('variables', arguments={
|
||||
'variablesReference': d_children[0]['variablesReference'],
|
||||
'format': {'hex': True}
|
||||
}).wait_for_response()
|
||||
d_child_of_child = resp_variables.body['variables']
|
||||
resp_variables = session.send_request(
|
||||
"variables",
|
||||
arguments={
|
||||
"variablesReference": d_children[0]["variablesReference"],
|
||||
"format": {"hex": True},
|
||||
},
|
||||
).wait_for_response()
|
||||
d_child_of_child = resp_variables.body["variables"]
|
||||
assert d_child_of_child == [
|
||||
{'name': '0x0', 'value': '0x2710', 'type': 'int', 'evaluateName': 'd[(1, 10, 100)][0]', 'variablesReference': 0, },
|
||||
{'name': '0x1', 'value': '0x186a0', 'type': 'int', 'evaluateName': 'd[(1, 10, 100)][1]', 'variablesReference': 0, },
|
||||
{'name': '0x2', 'value': '0x186a0', 'type': 'int', 'evaluateName': 'd[(1, 10, 100)][2]', 'variablesReference': 0, },
|
||||
{'name': '__len__', 'value': '0x3', 'type': 'int', 'evaluateName': 'len(d[(1, 10, 100)])', 'variablesReference': 0, 'presentationHint': {'attributes': ['readOnly']}, }
|
||||
{
|
||||
"name": "0x0",
|
||||
"value": "0x2710",
|
||||
"type": "int",
|
||||
"evaluateName": "d[(1, 10, 100)][0]",
|
||||
"variablesReference": 0,
|
||||
},
|
||||
{
|
||||
"name": "0x1",
|
||||
"value": "0x186a0",
|
||||
"type": "int",
|
||||
"evaluateName": "d[(1, 10, 100)][1]",
|
||||
"variablesReference": 0,
|
||||
},
|
||||
{
|
||||
"name": "0x2",
|
||||
"value": "0x186a0",
|
||||
"type": "int",
|
||||
"evaluateName": "d[(1, 10, 100)][2]",
|
||||
"variablesReference": 0,
|
||||
},
|
||||
{
|
||||
"name": "__len__",
|
||||
"value": "0x3",
|
||||
"type": "int",
|
||||
"evaluateName": "len(d[(1, 10, 100)])",
|
||||
"variablesReference": 0,
|
||||
"presentationHint": {"attributes": ["readOnly"]},
|
||||
},
|
||||
]
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
@ -11,210 +11,228 @@ from tests.patterns import some
from tests.timeline import Event
|
||||
|
||||
|
||||
@pytest.mark.parametrize('raised', ['raisedOn', 'raisedOff'])
|
||||
@pytest.mark.parametrize('uncaught', ['uncaughtOn', 'uncaughtOff'])
|
||||
def test_vsc_exception_options_raise_with_except(pyfile, start_method, run_as, raised, uncaught):
|
||||
|
||||
@pytest.mark.parametrize("raised", ["raisedOn", "raisedOff"])
|
||||
@pytest.mark.parametrize("uncaught", ["uncaughtOn", "uncaughtOff"])
|
||||
def test_vsc_exception_options_raise_with_except(
|
||||
pyfile, start_method, run_as, raised, uncaught
|
||||
):
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
|
||||
def raise_with_except():
|
||||
try:
|
||||
raise ArithmeticError('bad code') # @exception_line
|
||||
raise ArithmeticError("bad code") # @exception_line
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
raise_with_except()
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
ex_line = line_numbers['exception_line']
|
||||
ex_line = code_to_debug.lines["exception_line"]
|
||||
filters = []
|
||||
filters += ['raised'] if raised == 'raisedOn' else []
|
||||
filters += ['uncaught'] if uncaught == 'uncaughtOn' else []
|
||||
filters += ["raised"] if raised == "raisedOn" else []
|
||||
filters += ["uncaught"] if uncaught == "uncaughtOn" else []
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.send_request('setExceptionBreakpoints', {
|
||||
'filters': filters
|
||||
}).wait_for_response()
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
session.send_request(
|
||||
"setExceptionBreakpoints", {"filters": filters}
|
||||
).wait_for_response()
|
||||
session.start_debugging()
|
||||
|
||||
expected = ANY.dict_with({
|
||||
'exceptionId': ANY.such_that(lambda s: s.endswith('ArithmeticError')),
|
||||
'description': 'bad code',
|
||||
'breakMode': 'always' if raised == 'raisedOn' else 'unhandled',
|
||||
'details': ANY.dict_with({
|
||||
'typeName': ANY.such_that(lambda s: s.endswith('ArithmeticError')),
|
||||
'message': 'bad code',
|
||||
'source': Path(code_to_debug),
|
||||
}),
|
||||
})
|
||||
expected = some.dict.containing(
|
||||
{
|
||||
"exceptionId": some.str.such_that(
|
||||
lambda s: s.endswith("ArithmeticError")
|
||||
),
|
||||
"description": "bad code",
|
||||
"breakMode": "always" if raised == "raisedOn" else "unhandled",
|
||||
"details": some.dict.containing(
|
||||
{
|
||||
"typeName": some.str.such_that(
|
||||
lambda s: s.endswith("ArithmeticError")
|
||||
),
|
||||
"message": "bad code",
|
||||
"source": some.path(code_to_debug),
|
||||
}
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
if raised == 'raisedOn':
|
||||
if raised == "raisedOn":
|
||||
hit = session.wait_for_thread_stopped(
|
||||
reason='exception',
|
||||
text=ANY.such_that(lambda s: s.endswith('ArithmeticError')),
|
||||
description='bad code',
|
||||
reason="exception",
|
||||
text=some.str.such_that(lambda s: s.endswith("ArithmeticError")),
|
||||
description="bad code",
|
||||
)
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert ex_line == frames[0]['line']
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert ex_line == frames[0]["line"]
|
||||
|
||||
resp_exc_info = session.send_request('exceptionInfo', {
|
||||
'threadId': hit.thread_id
|
||||
}).wait_for_response()
|
||||
resp_exc_info = session.send_request(
|
||||
"exceptionInfo", {"threadId": hit.thread_id}
|
||||
).wait_for_response()
|
||||
|
||||
assert resp_exc_info.body == expected
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
# 'uncaught' should not cause a stop here, since the exception is caught
|
||||
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('raised', ['raisedOn', 'raisedOff'])
|
||||
@pytest.mark.parametrize('uncaught', ['uncaughtOn', 'uncaughtOff'])
|
||||
def test_vsc_exception_options_raise_without_except(pyfile, start_method, run_as, raised, uncaught):
|
||||
|
||||
@pytest.mark.parametrize("raised", ["raisedOn", "raisedOff"])
|
||||
@pytest.mark.parametrize("uncaught", ["uncaughtOn", "uncaughtOff"])
|
||||
def test_vsc_exception_options_raise_without_except(
|
||||
pyfile, start_method, run_as, raised, uncaught
|
||||
):
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
|
||||
def raise_without_except():
|
||||
raise ArithmeticError('bad code') # @exception_line
|
||||
raise ArithmeticError("bad code") # @exception_line
|
||||
|
||||
raise_without_except()
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
ex_line = line_numbers['exception_line']
|
||||
ex_line = code_to_debug.lines["exception_line"]
|
||||
filters = []
|
||||
filters += ['raised'] if raised == 'raisedOn' else []
|
||||
filters += ['uncaught'] if uncaught == 'uncaughtOn' else []
|
||||
filters += ["raised"] if raised == "raisedOn" else []
|
||||
filters += ["uncaught"] if uncaught == "uncaughtOn" else []
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
ignore_unobserved=[Event('stopped')],
|
||||
expected_returncode=ANY.int,
|
||||
ignore_unobserved=[Event("stopped")],
|
||||
expected_returncode=some.int,
|
||||
)
|
||||
session.send_request('setExceptionBreakpoints', {
|
||||
'filters': filters
|
||||
}).wait_for_response()
|
||||
session.send_request(
|
||||
"setExceptionBreakpoints", {"filters": filters}
|
||||
).wait_for_response()
|
||||
session.start_debugging()
|
||||
|
||||
expected = ANY.dict_with({
|
||||
'exceptionId': ANY.such_that(lambda s: s.endswith('ArithmeticError')),
|
||||
'description': 'bad code',
|
||||
'breakMode': 'always' if raised == 'raisedOn' else 'unhandled',
|
||||
'details': ANY.dict_with({
|
||||
'typeName': ANY.such_that(lambda s: s.endswith('ArithmeticError')),
|
||||
'message': 'bad code',
|
||||
'source': Path(code_to_debug),
|
||||
}),
|
||||
})
|
||||
expected = some.dict.containing(
|
||||
{
|
||||
"exceptionId": some.str.such_that(
|
||||
lambda s: s.endswith("ArithmeticError")
|
||||
),
|
||||
"description": "bad code",
|
||||
"breakMode": "always" if raised == "raisedOn" else "unhandled",
|
||||
"details": some.dict.containing(
|
||||
{
|
||||
"typeName": some.str.such_that(
|
||||
lambda s: s.endswith("ArithmeticError")
|
||||
),
|
||||
"message": "bad code",
|
||||
"source": some.path(code_to_debug),
|
||||
}
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
if raised == 'raisedOn':
|
||||
hit = session.wait_for_thread_stopped(reason='exception')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert ex_line == frames[0]['line']
|
||||
if raised == "raisedOn":
|
||||
hit = session.wait_for_thread_stopped(reason="exception")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert ex_line == frames[0]["line"]
|
||||
|
||||
resp_exc_info = session.send_request('exceptionInfo', {
|
||||
'threadId': hit.thread_id
|
||||
}).wait_for_response()
|
||||
resp_exc_info = session.send_request(
|
||||
"exceptionInfo", {"threadId": hit.thread_id}
|
||||
).wait_for_response()
|
||||
|
||||
assert resp_exc_info.body == expected
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
# NOTE: debugger stops at each frame if raised and is uncaught
|
||||
# This behavior can be changed by updating 'notify_on_handled_exceptions'
|
||||
# setting we send to pydevd to notify only once. In our test code, we have
|
||||
# two frames, hence two stops.
|
||||
session.wait_for_thread_stopped(reason='exception')
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.wait_for_thread_stopped(reason="exception")
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
if uncaught == 'uncaughtOn':
|
||||
hit = session.wait_for_thread_stopped(reason='exception')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert ex_line == frames[0]['line']
|
||||
if uncaught == "uncaughtOn":
|
||||
hit = session.wait_for_thread_stopped(reason="exception")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert ex_line == frames[0]["line"]
|
||||
|
||||
resp_exc_info = session.send_request('exceptionInfo', {
|
||||
'threadId': hit.thread_id
|
||||
}).wait_for_response()
|
||||
resp_exc_info = session.send_request(
|
||||
"exceptionInfo", {"threadId": hit.thread_id}
|
||||
).wait_for_response()
|
||||
|
||||
expected = ANY.dict_with({
|
||||
'exceptionId': ANY.such_that(lambda s: s.endswith('ArithmeticError')),
|
||||
'description': 'bad code',
|
||||
'breakMode': 'unhandled', # Only difference from previous expected is breakMode.
|
||||
'details': ANY.dict_with({
|
||||
'typeName': ANY.such_that(lambda s: s.endswith('ArithmeticError')),
|
||||
'message': 'bad code',
|
||||
'source': Path(code_to_debug),
|
||||
}),
|
||||
})
|
||||
expected = some.dict.containing(
|
||||
{
|
||||
"exceptionId": some.str.such_that(
|
||||
lambda s: s.endswith("ArithmeticError")
|
||||
),
|
||||
"description": "bad code",
|
||||
"breakMode": "unhandled", # Only difference from previous expected is breakMode.
|
||||
"details": some.dict.containing(
|
||||
{
|
||||
"typeName": some.str.such_that(
|
||||
lambda s: s.endswith("ArithmeticError")
|
||||
),
|
||||
"message": "bad code",
|
||||
"source": some.path(code_to_debug),
|
||||
}
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
assert resp_exc_info.body == expected
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('raised', ['raised', ''])
|
||||
@pytest.mark.parametrize('uncaught', ['uncaught', ''])
|
||||
@pytest.mark.parametrize('zero', ['zero', ''])
|
||||
@pytest.mark.parametrize('exit_code', [0, 1, 'nan'])
|
||||
@pytest.mark.parametrize("raised", ["raised", ""])
|
||||
@pytest.mark.parametrize("uncaught", ["uncaught", ""])
|
||||
@pytest.mark.parametrize("zero", ["zero", ""])
|
||||
@pytest.mark.parametrize("exit_code", [0, 1, "nan"])
|
||||
def test_systemexit(pyfile, start_method, run_as, raised, uncaught, zero, exit_code):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
import sys
|
||||
|
||||
exit_code = eval(sys.argv[1])
|
||||
print('sys.exit(%r)' % (exit_code,))
|
||||
print("sys.exit(%r)" % (exit_code,))
|
||||
try:
|
||||
sys.exit(exit_code) # @handled
|
||||
except SystemExit:
|
||||
pass
|
||||
sys.exit(exit_code) # @unhandled
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
line_numbers = code_to_debug.lines
|
||||
|
||||
filters = []
|
||||
if raised:
|
||||
filters += ['raised']
|
||||
filters += ["raised"]
|
||||
if uncaught:
|
||||
filters += ['uncaught']
|
||||
filters += ["uncaught"]
|
||||
|
||||
with debug.Session() as session:
|
||||
session.program_args = [repr(exit_code)]
|
||||
if zero:
|
||||
session.debug_options += ['BreakOnSystemExitZero']
|
||||
session.debug_options += ["BreakOnSystemExitZero"]
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
expected_returncode=ANY.int,
|
||||
expected_returncode=some.int,
|
||||
)
|
||||
session.send_request('setExceptionBreakpoints', {
|
||||
'filters': filters
|
||||
}).wait_for_response()
|
||||
session.send_request(
|
||||
"setExceptionBreakpoints", {"filters": filters}
|
||||
).wait_for_response()
|
||||
session.start_debugging()
|
||||
|
||||
# When breaking on raised exceptions, we'll stop on both lines,
|
||||
# unless it's SystemExit(0) and we asked to ignore that.
|
||||
if raised and (zero or exit_code != 0):
|
||||
hit = session.wait_for_thread_stopped(reason='exception')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert frames[0]['line'] == line_numbers['handled']
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
hit = session.wait_for_thread_stopped(reason="exception")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert frames[0]["line"] == line_numbers["handled"]
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
hit = session.wait_for_thread_stopped(reason='exception')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert frames[0]['line'] == line_numbers['unhandled']
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
hit = session.wait_for_thread_stopped(reason="exception")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert frames[0]["line"] == line_numbers["unhandled"]
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
# When breaking on uncaught exceptions, we'll stop on the second line,
|
||||
# unless it's SystemExit(0) and we asked to ignore that.
@ -223,37 +241,42 @@ def test_systemexit(pyfile, start_method, run_as, raised, uncaught, zero, exit_c
# for it unwinding the stack without finding a handler. The block above
|
||||
# takes care of the first stop, so here we just take care of the second.
|
||||
if uncaught and (zero or exit_code != 0):
|
||||
hit = session.wait_for_thread_stopped(reason='exception')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert frames[0]['line'] == line_numbers['unhandled']
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
hit = session.wait_for_thread_stopped(reason="exception")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert frames[0]["line"] == line_numbers["unhandled"]
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('break_mode', ['always', 'never', 'unhandled', 'userUnhandled'])
|
||||
@pytest.mark.parametrize('exceptions', [
|
||||
['RuntimeError'],
|
||||
['AssertionError'],
|
||||
['RuntimeError', 'AssertionError'],
|
||||
[], # Add the whole Python Exceptions category.
|
||||
])
|
||||
@pytest.mark.parametrize(
|
||||
"break_mode", ["always", "never", "unhandled", "userUnhandled"]
|
||||
)
|
||||
@pytest.mark.parametrize(
|
||||
"exceptions",
|
||||
[
|
||||
["RuntimeError"],
|
||||
["AssertionError"],
|
||||
["RuntimeError", "AssertionError"],
|
||||
[], # Add the whole Python Exceptions category.
|
||||
],
|
||||
)
|
||||
def test_raise_exception_options(pyfile, start_method, run_as, exceptions, break_mode):
|
||||
|
||||
if break_mode in ('never', 'unhandled', 'userUnhandled'):
|
||||
if break_mode in ("never", "unhandled", "userUnhandled"):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
|
||||
raise AssertionError() # @AssertionError
|
||||
|
||||
if break_mode == 'never':
|
||||
if break_mode == "never":
|
||||
expect_exceptions = []
|
||||
|
||||
elif 'AssertionError' in exceptions or not exceptions:
|
||||
elif "AssertionError" in exceptions or not exceptions:
|
||||
# Only AssertionError is raised in this use-case.
|
||||
expect_exceptions = ['AssertionError']
|
||||
expect_exceptions = ["AssertionError"]
|
||||
|
||||
else:
|
||||
expect_exceptions = []
@ -262,12 +285,12 @@ def test_raise_exception_options(pyfile, start_method, run_as, exceptions, break
expect_exceptions = exceptions[:]
|
||||
if not expect_exceptions:
|
||||
# Deal with the Python Exceptions category
|
||||
expect_exceptions = ['RuntimeError', 'AssertionError', 'IndexError']
|
||||
expect_exceptions = ["RuntimeError", "AssertionError", "IndexError"]
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
|
||||
try:
|
||||
raise RuntimeError() # @RuntimeError
|
||||
except RuntimeError:
@ -281,49 +304,49 @@ def test_raise_exception_options(pyfile, start_method, run_as, exceptions, break
except IndexError:
|
||||
pass
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
ignore_unobserved=[Event('stopped')],
|
||||
expected_returncode=ANY.int,
|
||||
ignore_unobserved=[Event("stopped")],
|
||||
expected_returncode=some.int,
|
||||
)
|
||||
path = [
|
||||
{'names': ['Python Exceptions']},
|
||||
]
|
||||
path = [{"names": ["Python Exceptions"]}]
|
||||
if exceptions:
|
||||
path.append({'names': exceptions})
|
||||
session.send_request('setExceptionBreakpoints', {
|
||||
'filters': [], # Unused when exceptionOptions is passed.
|
||||
'exceptionOptions': [{
|
||||
'path': path,
|
||||
'breakMode': break_mode, # Can be "never", "always", "unhandled", "userUnhandled"
|
||||
}],
|
||||
}).wait_for_response()
|
||||
path.append({"names": exceptions})
|
||||
session.send_request(
|
||||
"setExceptionBreakpoints",
|
||||
{
|
||||
"filters": [], # Unused when exceptionOptions is passed.
|
||||
"exceptionOptions": [
|
||||
{
|
||||
"path": path,
|
||||
"breakMode": break_mode, # Can be "never", "always", "unhandled", "userUnhandled"
|
||||
}
|
||||
],
|
||||
},
|
||||
).wait_for_response()
|
||||
session.start_debugging()
|
||||
|
||||
for expected_exception in expect_exceptions:
|
||||
hit = session.wait_for_thread_stopped(reason='exception')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert frames[0]['source']['path'].endswith('code_to_debug.py')
|
||||
assert frames[0]['line'] == line_numbers[expected_exception]
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
hit = session.wait_for_thread_stopped(reason="exception")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert frames[0]["source"]["path"].endswith("code_to_debug.py")
|
||||
assert frames[0]["line"] == code_to_debug.lines[expected_exception]
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
session.wait_for_exit()
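The setExceptionBreakpoints request assembled in the hunk above takes arguments of the following shape; this is an illustrative sketch with sample values, not part of the commit:

# Illustrative payload for the setExceptionBreakpoints request built above.
# "Python Exceptions" is the category name used by these tests; the second
# "names" entry narrows the rule to specific exception classes when one is given.
set_exception_breakpoints_args = {
    "filters": [],  # unused when exceptionOptions is passed
    "exceptionOptions": [
        {
            "path": [
                {"names": ["Python Exceptions"]},
                {"names": ["RuntimeError", "AssertionError"]},
            ],
            "breakMode": "always",  # or "never", "unhandled", "userUnhandled"
        }
    ],
}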
|
||||
|
||||
|
||||
@pytest.mark.parametrize('exit_code', [0, 3])
|
||||
@pytest.mark.parametrize("exit_code", [0, 3])
|
||||
def test_success_exitcodes(pyfile, start_method, run_as, exit_code):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
import sys
|
||||
|
||||
exit_code = eval(sys.argv[1])
|
||||
print('sys.exit(%r)' % (exit_code,))
|
||||
print("sys.exit(%r)" % (exit_code,))
|
||||
sys.exit(exit_code)
|
||||
|
||||
with debug.Session() as session:
@ -334,29 +357,27 @@ def test_success_exitcodes(pyfile, start_method, run_as, exit_code):
start_method=start_method,
|
||||
expected_returncode=exit_code,
|
||||
)
|
||||
session.send_request('setExceptionBreakpoints', {
|
||||
'filters': ['uncaught']
|
||||
}).wait_for_response()
|
||||
session.send_request(
|
||||
"setExceptionBreakpoints", {"filters": ["uncaught"]}
|
||||
).wait_for_response()
|
||||
session.start_debugging()
|
||||
|
||||
if exit_code == 0:
|
||||
session.wait_for_thread_stopped(reason='exception')
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.wait_for_thread_stopped(reason="exception")
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('max_frames', ['default', 'all', 10])
|
||||
@pytest.mark.parametrize("max_frames", ["default", "all", 10])
|
||||
def test_exception_stack(pyfile, start_method, run_as, max_frames):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
|
||||
def do_something(n):
|
||||
if n <= 0:
|
||||
raise ArithmeticError('bad code') # @unhandled
|
||||
raise ArithmeticError("bad code") # @unhandled
|
||||
do_something2(n - 1)
|
||||
|
||||
def do_something2(n):
@ -364,12 +385,12 @@ def test_exception_stack(pyfile, start_method, run_as, max_frames):
do_something(100)
|
||||
|
||||
if max_frames == 'all':
|
||||
if max_frames == "all":
|
||||
# traceback compresses repeated text
|
||||
min_expected_lines = 100
|
||||
max_expected_lines = 221
|
||||
args = {'maxExceptionStackFrames': 0}
|
||||
elif max_frames == 'default':
|
||||
args = {"maxExceptionStackFrames": 0}
|
||||
elif max_frames == "default":
|
||||
# default is all frames
|
||||
min_expected_lines = 100
|
||||
max_expected_lines = 221
@ -377,44 +398,47 @@ def test_exception_stack(pyfile, start_method, run_as, max_frames):
else:
|
||||
min_expected_lines = 10
|
||||
max_expected_lines = 21
|
||||
args = {'maxExceptionStackFrames': 10}
|
||||
args = {"maxExceptionStackFrames": 10}
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
expected_returncode=ANY.int,
|
||||
expected_returncode=some.int,
|
||||
args=args,
|
||||
)
|
||||
session.send_request('setExceptionBreakpoints', {
|
||||
'filters': ['uncaught']
|
||||
}).wait_for_response()
|
||||
session.send_request(
|
||||
"setExceptionBreakpoints", {"filters": ["uncaught"]}
|
||||
).wait_for_response()
|
||||
session.start_debugging()
|
||||
|
||||
hit = session.wait_for_thread_stopped(reason='exception')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert frames[0]['line'] == line_numbers['unhandled']
|
||||
hit = session.wait_for_thread_stopped(reason="exception")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert frames[0]["line"] == code_to_debug.lines["unhandled"]
|
||||
|
||||
resp_exc_info = session.send_request('exceptionInfo', {
|
||||
'threadId': hit.thread_id
|
||||
}).wait_for_response()
|
||||
resp_exc_info = session.send_request(
|
||||
"exceptionInfo", {"threadId": hit.thread_id}
|
||||
).wait_for_response()
|
||||
|
||||
expected = ANY.dict_with({
|
||||
'exceptionId': Regex('ArithmeticError'),
|
||||
'description': 'bad code',
|
||||
'breakMode': 'unhandled',
|
||||
'details': ANY.dict_with({
|
||||
'typeName': Regex('ArithmeticError'),
|
||||
'message': 'bad code',
|
||||
'source': Path(code_to_debug),
|
||||
}),
|
||||
})
|
||||
expected = some.dict.containing(
|
||||
{
|
||||
"exceptionId": some.matching("ArithmeticError"),
|
||||
"description": "bad code",
|
||||
"breakMode": "unhandled",
|
||||
"details": some.dict.containing(
|
||||
{
|
||||
"typeName": some.matching("ArithmeticError"),
|
||||
"message": "bad code",
|
||||
"source": some.path(code_to_debug),
|
||||
}
|
||||
),
|
||||
}
|
||||
)
|
||||
assert resp_exc_info.body == expected
|
||||
stack_str = resp_exc_info.body['details']['stackTrace']
|
||||
stack_line_count = len(stack_str.split('\n'))
|
||||
stack_str = resp_exc_info.body["details"]["stackTrace"]
|
||||
stack_line_count = len(stack_str.split("\n"))
|
||||
assert min_expected_lines <= stack_line_count <= max_expected_lines
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
session.wait_for_exit()
@ -4,108 +4,93 @@
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
from os import path
|
||||
import os.path
|
||||
import pytest
|
||||
|
||||
from tests import debug
|
||||
from tests import debug, test_data
|
||||
from tests.patterns import some
|
||||
|
||||
|
||||
@pytest.mark.parametrize('scenario', [
|
||||
'exclude_by_name',
|
||||
'exclude_by_dir',
|
||||
])
|
||||
@pytest.mark.parametrize('exception_type', [
|
||||
'RuntimeError',
|
||||
'SysExit'
|
||||
])
|
||||
def test_exceptions_and_exclude_rules(pyfile, start_method, run_as, scenario, exception_type):
|
||||
@pytest.mark.parametrize("scenario", ["exclude_by_name", "exclude_by_dir"])
|
||||
@pytest.mark.parametrize("exception_type", ["RuntimeError", "SysExit"])
|
||||
def test_exceptions_and_exclude_rules(
|
||||
pyfile, start_method, run_as, scenario, exception_type
|
||||
):
|
||||
|
||||
if exception_type == 'RuntimeError':
|
||||
if exception_type == "RuntimeError":
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
raise RuntimeError('unhandled error') # @raise_line
|
||||
import debug_me # noqa
|
||||
|
||||
elif exception_type == 'SysExit':
|
||||
raise RuntimeError("unhandled error") # @raise_line
|
||||
|
||||
elif exception_type == "SysExit":
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import debug_me # noqa
|
||||
import sys
|
||||
import_and_enable_debugger()
|
||||
|
||||
sys.exit(1) # @raise_line
|
||||
|
||||
else:
|
||||
raise AssertionError('Unexpected exception_type: %s' % (exception_type,))
|
||||
raise AssertionError("Unexpected exception_type: %s" % (exception_type,))
|
||||
|
||||
if scenario == 'exclude_by_name':
|
||||
rules = [{'path': '**/' + path.basename(code_to_debug), 'include': False}]
|
||||
elif scenario == 'exclude_by_dir':
|
||||
rules = [{'path': os.path.dirname(code_to_debug), 'include': False}]
|
||||
if scenario == "exclude_by_name":
|
||||
rules = [{"path": "**/" + os.path.basename(code_to_debug), "include": False}]
|
||||
elif scenario == "exclude_by_dir":
|
||||
rules = [{"path": os.path.dirname(code_to_debug), "include": False}]
|
||||
else:
|
||||
raise AssertionError('Unexpected scenario: %s' % (scenario,))
|
||||
raise AssertionError("Unexpected scenario: %s" % (scenario,))
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
rules=rules,
|
||||
target=(run_as, code_to_debug), start_method=start_method, rules=rules
|
||||
)
|
||||
# TODO: The process returncode doesn't match the one returned from the DAP.
|
||||
# See: https://github.com/Microsoft/ptvsd/issues/1278
|
||||
session.expected_returncode = ANY.int
|
||||
filters = ['raised', 'uncaught']
|
||||
session.expected_returncode = some.int
|
||||
filters = ["raised", "uncaught"]
|
||||
|
||||
session.send_request('setExceptionBreakpoints', {
|
||||
'filters': filters
|
||||
}).wait_for_response()
|
||||
session.send_request(
|
||||
"setExceptionBreakpoints", {"filters": filters}
|
||||
).wait_for_response()
|
||||
session.start_debugging()
|
||||
|
||||
# No exceptions should be seen.
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('scenario', [
|
||||
'exclude_code_to_debug',
|
||||
'exclude_callback_dir',
|
||||
])
|
||||
@pytest.mark.parametrize("scenario", ["exclude_code_to_debug", "exclude_callback_dir"])
|
||||
def test_exceptions_and_partial_exclude_rules(pyfile, start_method, run_as, scenario):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
|
||||
import backchannel
|
||||
import debug_me # noqa
|
||||
from debug_me import backchannel
|
||||
import sys
|
||||
|
||||
json = backchannel.read_json()
|
||||
call_me_back_dir = json['call_me_back_dir']
|
||||
call_me_back_dir = json["call_me_back_dir"]
|
||||
sys.path.append(call_me_back_dir)
|
||||
|
||||
import call_me_back
|
||||
|
||||
def call_func():
|
||||
raise RuntimeError('unhandled error') # @raise_line
|
||||
raise RuntimeError("unhandled error") # @raise_line
|
||||
|
||||
call_me_back.call_me_back(call_func) # @call_me_back_line
|
||||
print('done')
|
||||
print("done")
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
call_me_back_dir = get_test_root('call_me_back')
|
||||
line_numbers = code_to_debug.lines
|
||||
call_me_back_dir = test_data / "call_me_back"
|
||||
|
||||
if scenario == 'exclude_code_to_debug':
|
||||
rules = [
|
||||
{'path': '**/' + path.basename(code_to_debug), 'include': False}
|
||||
]
|
||||
elif scenario == 'exclude_callback_dir':
|
||||
rules = [
|
||||
{'path': call_me_back_dir, 'include': False}
|
||||
]
|
||||
if scenario == "exclude_code_to_debug":
|
||||
rules = [{"path": "**/" + os.path.basename(code_to_debug), "include": False}]
|
||||
elif scenario == "exclude_callback_dir":
|
||||
rules = [{"path": call_me_back_dir, "include": False}]
|
||||
else:
|
||||
raise AssertionError('Unexpected scenario: %s' % (scenario,))
|
||||
raise AssertionError("Unexpected scenario: %s" % (scenario,))
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
@ -116,87 +101,100 @@ def test_exceptions_and_partial_exclude_rules(pyfile, start_method, run_as, scen
)
|
||||
# TODO: The process returncode doesn't match the one returned from the DAP.
|
||||
# See: https://github.com/Microsoft/ptvsd/issues/1278
|
||||
session.expected_returncode = ANY.int
|
||||
filters = ['raised', 'uncaught']
|
||||
session.expected_returncode = some.int
|
||||
filters = ["raised", "uncaught"]
|
||||
|
||||
session.send_request('setExceptionBreakpoints', {
|
||||
'filters': filters
|
||||
}).wait_for_response()
|
||||
session.send_request(
|
||||
"setExceptionBreakpoints", {"filters": filters}
|
||||
).wait_for_response()
|
||||
session.start_debugging()
|
||||
session.write_json({'call_me_back_dir': call_me_back_dir})
|
||||
session.write_json({"call_me_back_dir": call_me_back_dir})
|
||||
|
||||
if scenario == 'exclude_code_to_debug':
|
||||
if scenario == "exclude_code_to_debug":
|
||||
# Stop at handled
|
||||
hit = session.wait_for_thread_stopped(reason='exception')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
hit = session.wait_for_thread_stopped(reason="exception")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
# We don't stop at the raise line but rather at the callback module which is
|
||||
# not excluded.
|
||||
assert len(frames) == 1
|
||||
assert frames[0] == ANY.dict_with({
|
||||
'line': 2,
|
||||
'source': ANY.dict_with({
|
||||
'path': Path(os.path.join(call_me_back_dir, 'call_me_back.py'))
|
||||
})
|
||||
})
|
||||
# assert frames[1] == ANY.dict_with({ -- filtered out
|
||||
assert frames[0] == some.dict.containing(
|
||||
{
|
||||
"line": 2,
|
||||
"source": some.dict.containing(
|
||||
{
|
||||
"path": some.path(
|
||||
os.path.join(call_me_back_dir, "call_me_back.py")
|
||||
)
|
||||
}
|
||||
),
|
||||
}
|
||||
)
|
||||
# assert frames[1] == some.dict.containing({ -- filtered out
|
||||
# 'line': line_numbers['call_me_back_line'],
|
||||
# 'source': ANY.dict_with({
|
||||
# 'path': Path(code_to_debug)
|
||||
# 'source': some.dict.containing({
|
||||
# 'path': some.path(code_to_debug)
|
||||
# })
|
||||
# })
|
||||
# 'continue' should terminate the debuggee
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
# Note: does not stop at unhandled exception because raise was in excluded file.
|
||||
|
||||
elif scenario == 'exclude_callback_dir':
|
||||
elif scenario == "exclude_callback_dir":
|
||||
# Stop at handled raise_line
|
||||
hit = session.wait_for_thread_stopped(reason='exception')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert [(frame['name'], path.basename(frame['source']['path'])) for frame in frames] == [
|
||||
('call_func', 'code_to_debug.py'),
|
||||
hit = session.wait_for_thread_stopped(reason="exception")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert [
|
||||
(frame["name"], os.path.basename(frame["source"]["path"]))
|
||||
for frame in frames
|
||||
] == [
|
||||
("call_func", "code_to_debug.py"),
|
||||
# ('call_me_back', 'call_me_back.py'), -- filtered out
|
||||
('<module>', 'code_to_debug.py'),
|
||||
("<module>", "code_to_debug.py"),
|
||||
]
|
||||
assert frames[0] == ANY.dict_with({
|
||||
'line': line_numbers['raise_line'],
|
||||
'source': ANY.dict_with({
|
||||
'path': Path(code_to_debug)
|
||||
})
|
||||
})
|
||||
session.send_request('continue').wait_for_response()
|
||||
assert frames[0] == some.dict.containing(
|
||||
{
|
||||
"line": line_numbers["raise_line"],
|
||||
"source": some.dict.containing({"path": some.path(code_to_debug)}),
|
||||
}
|
||||
)
|
||||
session.send_request("continue").wait_for_response()
|
||||
|
||||
# Stop at handled call_me_back_line
|
||||
hit = session.wait_for_thread_stopped(reason='exception')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert [(frame['name'], path.basename(frame['source']['path'])) for frame in frames] == [
|
||||
('<module>', 'code_to_debug.py'),
|
||||
]
|
||||
assert frames[0] == ANY.dict_with({
|
||||
'line': line_numbers['call_me_back_line'],
|
||||
'source': ANY.dict_with({
|
||||
'path': Path(code_to_debug)
|
||||
})
|
||||
})
|
||||
session.send_request('continue').wait_for_response()
|
||||
hit = session.wait_for_thread_stopped(reason="exception")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert [
|
||||
(frame["name"], os.path.basename(frame["source"]["path"]))
|
||||
for frame in frames
|
||||
] == [("<module>", "code_to_debug.py")]
|
||||
assert frames[0] == some.dict.containing(
|
||||
{
|
||||
"line": line_numbers["call_me_back_line"],
|
||||
"source": some.dict.containing({"path": some.path(code_to_debug)}),
|
||||
}
|
||||
)
|
||||
session.send_request("continue").wait_for_response()
|
||||
|
||||
# Stop at unhandled
|
||||
hit = session.wait_for_thread_stopped(reason='exception')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert [(frame['name'], path.basename(frame['source']['path'])) for frame in frames] == [
|
||||
('call_func', 'code_to_debug.py'),
|
||||
hit = session.wait_for_thread_stopped(reason="exception")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert [
|
||||
(frame["name"], os.path.basename(frame["source"]["path"]))
|
||||
for frame in frames
|
||||
] == [
|
||||
("call_func", "code_to_debug.py"),
|
||||
# ('call_me_back', 'call_me_back.py'), -- filtered out
|
||||
('<module>', 'code_to_debug.py'),
|
||||
("<module>", "code_to_debug.py"),
|
||||
]
|
||||
|
||||
assert frames[0] == ANY.dict_with({
|
||||
'line': line_numbers['raise_line'],
|
||||
'source': ANY.dict_with({
|
||||
'path': Path(code_to_debug)
|
||||
})
|
||||
})
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
assert frames[0] == some.dict.containing(
|
||||
{
|
||||
"line": line_numbers["raise_line"],
|
||||
"source": some.dict.containing({"path": some.path(code_to_debug)}),
|
||||
}
|
||||
)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
else:
|
||||
raise AssertionError('Unexpected scenario: %s' % (scenario,))
|
||||
raise AssertionError("Unexpected scenario: %s" % (scenario,))
|
||||
|
||||
session.wait_for_exit()
|
||||
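The assertions in the hunks above and below match full DAP payloads against partial patterns: this change swaps the old ANY.dict_with / Path helpers for some.dict.containing and some.path. As a rough sketch only, assuming nothing about the real tests.patterns implementation, a subset-matching pattern can be an object whose __eq__ checks just the expected keys:

class DictContaining(object):
    # Hypothetical stand-in for a some.dict.containing-style pattern object.
    def __init__(self, expected):
        self.expected = expected

    def __eq__(self, other):
        # Match any dict that has at least the expected keys with equal values;
        # extra keys in the observed dict are ignored.
        return isinstance(other, dict) and all(
            key in other and other[key] == value
            for key, value in self.expected.items()
        )

    def __ne__(self, other):
        return not self.__eq__(other)

# Example: extra keys such as "id" and "column" do not break the match.
assert {"line": 4, "id": 7, "column": 1} == DictContaining({"line": 4})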
@@ -8,58 +8,59 @@ import platform
|
|||
import pytest
|
||||
import sys
|
||||
|
||||
from tests import debug, net, test_data
|
||||
from tests import code, debug, net, test_data
|
||||
from tests.patterns import some
|
||||
from tests.timeline import Event
|
||||
|
||||
|
||||
FLASK1_ROOT = test_data / 'flask1'
|
||||
FLASK1_APP = FLASK1_ROOT / 'app.py'
|
||||
FLASK1_TEMPLATE = FLASK1_ROOT / 'templates' / 'hello.html'
|
||||
FLASK1_BAD_TEMPLATE = FLASK1_ROOT / 'templates' / 'bad.html'
|
||||
FLASK1_ROOT = test_data / "flask1"
|
||||
FLASK1_APP = FLASK1_ROOT / "app.py"
|
||||
FLASK1_TEMPLATE = FLASK1_ROOT / "templates" / "hello.html"
|
||||
FLASK1_BAD_TEMPLATE = FLASK1_ROOT / "templates" / "bad.html"
|
||||
FLASK_PORT = net.get_test_server_port(7000, 7100)
|
||||
|
||||
flask_server = net.WebServer(FLASK_PORT)
|
||||
app_py_lines = code.get_marked_line_numbers(FLASK1_APP)
|
||||
|
||||
|
||||
def _initialize_flask_session_no_multiproc(session, start_method):
|
||||
env = {
|
||||
'FLASK_APP': 'app.py',
|
||||
'FLASK_ENV': 'development',
|
||||
'FLASK_DEBUG': '0',
|
||||
}
|
||||
if platform.system() != 'Windows':
|
||||
locale = 'en_US.utf8' if platform.system() == 'Linux' else 'en_US.UTF-8'
|
||||
env.update({
|
||||
'LC_ALL': locale,
|
||||
'LANG': locale,
|
||||
})
|
||||
env = {"FLASK_APP": "app.py", "FLASK_ENV": "development", "FLASK_DEBUG": "0"}
|
||||
if platform.system() != "Windows":
|
||||
locale = "en_US.utf8" if platform.system() == "Linux" else "en_US.UTF-8"
|
||||
env.update({"LC_ALL": locale, "LANG": locale})
|
||||
|
||||
session.initialize(
|
||||
start_method=start_method,
|
||||
target=('module', 'flask'),
|
||||
program_args=['run', '--no-debugger', '--no-reload', '--with-threads', '--port', str(FLASK_PORT)],
|
||||
ignore_unobserved=[Event('stopped')],
|
||||
debug_options=['Jinja'],
|
||||
target=("module", "flask"),
|
||||
program_args=[
|
||||
"run",
|
||||
"--no-debugger",
|
||||
"--no-reload",
|
||||
"--with-threads",
|
||||
"--port",
|
||||
str(FLASK_PORT),
|
||||
],
|
||||
ignore_unobserved=[Event("stopped")],
|
||||
debug_options=["Jinja"],
|
||||
cwd=FLASK1_ROOT,
|
||||
env=env,
|
||||
expected_returncode=ANY.int, # No clean way to kill Flask server
|
||||
expected_returncode=some.int, # No clean way to kill Flask server
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('bp_target', ['code', 'template'])
|
||||
@pytest.mark.parametrize('start_method', ['launch', 'attach_socket_cmdline'])
|
||||
@pytest.mark.parametrize("bp_target", ["code", "template"])
|
||||
@pytest.mark.parametrize("start_method", ["launch", "attach_socket_cmdline"])
|
||||
@pytest.mark.timeout(60)
|
||||
def test_flask_breakpoint_no_multiproc(bp_target, start_method):
|
||||
bp_file, bp_line, bp_name = {
|
||||
'code': (FLASK1_APP, 11, 'home'),
|
||||
'template': (FLASK1_TEMPLATE, 8, 'template')
|
||||
"code": (FLASK1_APP, app_py_lines["bphome"], "home"),
|
||||
"template": (FLASK1_TEMPLATE, 8, "template"),
|
||||
}[bp_target]
|
||||
|
||||
with debug.Session() as session:
|
||||
_initialize_flask_session_no_multiproc(session, start_method)
|
||||
|
||||
bp_var_content = 'Flask-Jinja-Test'
|
||||
bp_var_content = "Flask-Jinja-Test"
|
||||
session.set_breakpoints(bp_file, [bp_line])
|
||||
session.start_debugging()
|
||||
|
||||
|
|
@@ -68,288 +69,314 @@ def test_flask_breakpoint_no_multiproc(bp_target, start_method):
|
|||
link = FLASK_LINK
|
||||
web_request = get_web_content(link, {})
|
||||
|
||||
thread_stopped = session.wait_for_next(Event('stopped'), ANY.dict_with({'reason': 'breakpoint'}))
|
||||
assert thread_stopped.body['threadId'] is not None
|
||||
thread_stopped = session.wait_for_next(
|
||||
Event("stopped"), some.dict.containing({"reason": "breakpoint"})
|
||||
)
|
||||
assert thread_stopped.body["threadId"] is not None
|
||||
|
||||
tid = thread_stopped.body['threadId']
|
||||
tid = thread_stopped.body["threadId"]
|
||||
|
||||
resp_stacktrace = session.send_request('stackTrace', arguments={
|
||||
'threadId': tid,
|
||||
}).wait_for_response()
|
||||
assert resp_stacktrace.body['totalFrames'] > 0
|
||||
frames = resp_stacktrace.body['stackFrames']
|
||||
resp_stacktrace = session.send_request(
|
||||
"stackTrace", arguments={"threadId": tid}
|
||||
).wait_for_response()
|
||||
assert resp_stacktrace.body["totalFrames"] > 0
|
||||
frames = resp_stacktrace.body["stackFrames"]
|
||||
assert frames[0] == {
|
||||
'id': ANY.dap_id,
|
||||
'name': bp_name,
|
||||
'source': {
|
||||
'sourceReference': ANY.dap_id,
|
||||
'path': Path(bp_file),
|
||||
},
|
||||
'line': bp_line,
|
||||
'column': 1,
|
||||
"id": some.dap_id,
|
||||
"name": bp_name,
|
||||
"source": {"sourceReference": some.dap_id, "path": some.path(bp_file)},
|
||||
"line": bp_line,
|
||||
"column": 1,
|
||||
}
|
||||
|
||||
fid = frames[0]['id']
|
||||
resp_scopes = session.send_request('scopes', arguments={
|
||||
'frameId': fid
|
||||
}).wait_for_response()
|
||||
scopes = resp_scopes.body['scopes']
|
||||
fid = frames[0]["id"]
|
||||
resp_scopes = session.send_request(
|
||||
"scopes", arguments={"frameId": fid}
|
||||
).wait_for_response()
|
||||
scopes = resp_scopes.body["scopes"]
|
||||
assert len(scopes) > 0
|
||||
|
||||
resp_variables = session.send_request('variables', arguments={
|
||||
'variablesReference': scopes[0]['variablesReference']
|
||||
}).wait_for_response()
|
||||
variables = list(v for v in resp_variables.body['variables'] if v['name'] == 'content')
|
||||
assert variables == [{
|
||||
'name': 'content',
|
||||
'type': 'str',
|
||||
'value': repr(bp_var_content),
|
||||
'presentationHint': {'attributes': ['rawString']},
|
||||
'evaluateName': 'content',
|
||||
'variablesReference': 0,
|
||||
}]
|
||||
resp_variables = session.send_request(
|
||||
"variables",
|
||||
arguments={"variablesReference": scopes[0]["variablesReference"]},
|
||||
).wait_for_response()
|
||||
variables = list(
|
||||
v for v in resp_variables.body["variables"] if v["name"] == "content"
|
||||
)
|
||||
assert variables == [
|
||||
{
|
||||
"name": "content",
|
||||
"type": "str",
|
||||
"value": repr(bp_var_content),
|
||||
"presentationHint": {"attributes": ["rawString"]},
|
||||
"evaluateName": "content",
|
||||
"variablesReference": 0,
|
||||
}
|
||||
]
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
web_content = web_request.wait_for_response()
|
||||
assert web_content.find(bp_var_content) != -1
|
||||
|
||||
# shutdown to web server
|
||||
link = FLASK_LINK + 'exit'
|
||||
link = FLASK_LINK + "exit"
|
||||
get_web_content(link).wait_for_response()
|
||||
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('start_method', ['launch', 'attach_socket_cmdline'])
|
||||
@pytest.mark.parametrize("start_method", ["launch", "attach_socket_cmdline"])
|
||||
@pytest.mark.timeout(60)
|
||||
def test_flask_template_exception_no_multiproc(start_method):
|
||||
with debug.Session() as session:
|
||||
_initialize_flask_session_no_multiproc(session, start_method)
|
||||
|
||||
session.send_request('setExceptionBreakpoints', arguments={
|
||||
'filters': ['raised', 'uncaught'],
|
||||
}).wait_for_response()
|
||||
session.send_request(
|
||||
"setExceptionBreakpoints", arguments={"filters": ["raised", "uncaught"]}
|
||||
).wait_for_response()
|
||||
|
||||
session.start_debugging()
|
||||
|
||||
# wait for Flask web server to start
|
||||
wait_for_connection(FLASK_PORT)
|
||||
base_link = FLASK_LINK
|
||||
part = 'badtemplate'
|
||||
link = base_link + part if base_link.endswith('/') else ('/' + part)
|
||||
part = "badtemplate"
|
||||
link = base_link + part if base_link.endswith("/") else ("/" + part)
|
||||
web_request = get_web_content(link, {})
|
||||
|
||||
hit = session.wait_for_thread_stopped()
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert frames[0] == ANY.dict_with({
|
||||
'id': ANY.dap_id,
|
||||
'name': 'template' if sys.version_info[0] >= 3 else 'Jinja2 TemplateSyntaxError',
|
||||
'source': ANY.dict_with({
|
||||
'sourceReference': ANY.dap_id,
|
||||
'path': Path(FLASK1_BAD_TEMPLATE),
|
||||
}),
|
||||
'line': 8,
|
||||
'column': 1,
|
||||
})
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert frames[0] == some.dict.containing(
|
||||
{
|
||||
"id": some.dap_id,
|
||||
"name": "template"
|
||||
if sys.version_info[0] >= 3
|
||||
else "Jinja2 TemplateSyntaxError",
|
||||
"source": some.dict.containing(
|
||||
{
|
||||
"sourceReference": some.dap_id,
|
||||
"path": some.path(FLASK1_BAD_TEMPLATE),
|
||||
}
|
||||
),
|
||||
"line": 8,
|
||||
"column": 1,
|
||||
}
|
||||
)
|
||||
|
||||
resp_exception_info = session.send_request(
|
||||
'exceptionInfo',
|
||||
arguments={'threadId': hit.thread_id, }
|
||||
"exceptionInfo", arguments={"threadId": hit.thread_id}
|
||||
).wait_for_response()
|
||||
exception = resp_exception_info.body
|
||||
assert exception == ANY.dict_with({
|
||||
'exceptionId': ANY.such_that(lambda s: s.endswith('TemplateSyntaxError')),
|
||||
'breakMode': 'always',
|
||||
'description': ANY.such_that(lambda s: s.find('doesnotexist') > -1),
|
||||
'details': ANY.dict_with({
|
||||
'message': ANY.such_that(lambda s: s.find('doesnotexist') > -1),
|
||||
'typeName': ANY.such_that(lambda s: s.endswith('TemplateSyntaxError')),
|
||||
})
|
||||
})
|
||||
assert exception == some.dict.containing(
|
||||
{
|
||||
"exceptionId": some.str.such_that(
|
||||
lambda s: s.endswith("TemplateSyntaxError")
|
||||
),
|
||||
"breakMode": "always",
|
||||
"description": some.str.such_that(
|
||||
lambda s: s.find("doesnotexist") > -1
|
||||
),
|
||||
"details": some.dict.containing(
|
||||
{
|
||||
"message": some.str.such_that(
|
||||
lambda s: s.find("doesnotexist") > -1
|
||||
),
|
||||
"typeName": some.str.such_that(
|
||||
lambda s: s.endswith("TemplateSyntaxError")
|
||||
),
|
||||
}
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
# ignore response for exception tests
|
||||
web_request.wait_for_response()
|
||||
|
||||
# shutdown to web server
|
||||
link = base_link + 'exit' if base_link.endswith('/') else '/exit'
|
||||
link = base_link + "exit" if base_link.endswith("/") else "/exit"
|
||||
get_web_content(link).wait_for_response()
|
||||
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('ex_type', ['handled', 'unhandled'])
|
||||
@pytest.mark.parametrize('start_method', ['launch', 'attach_socket_cmdline'])
|
||||
@pytest.mark.parametrize("ex_type", ["handled", "unhandled"])
|
||||
@pytest.mark.parametrize("start_method", ["launch", "attach_socket_cmdline"])
|
||||
@pytest.mark.timeout(60)
|
||||
def test_flask_exception_no_multiproc(ex_type, start_method):
|
||||
ex_line = {
|
||||
'handled': 21,
|
||||
'unhandled': 33,
|
||||
}[ex_type]
|
||||
ex_line = {"handled": 21, "unhandled": 33}[ex_type]
|
||||
|
||||
with debug.Session() as session:
|
||||
_initialize_flask_session_no_multiproc(session, start_method)
|
||||
|
||||
session.send_request('setExceptionBreakpoints', arguments={
|
||||
'filters': ['raised', 'uncaught'],
|
||||
}).wait_for_response()
|
||||
session.send_request(
|
||||
"setExceptionBreakpoints", arguments={"filters": ["raised", "uncaught"]}
|
||||
).wait_for_response()
|
||||
|
||||
session.start_debugging()
|
||||
|
||||
# wait for Flask web server to start
|
||||
wait_for_connection(FLASK_PORT)
|
||||
base_link = FLASK_LINK
|
||||
link = base_link + ex_type if base_link.endswith('/') else ('/' + ex_type)
|
||||
link = base_link + ex_type if base_link.endswith("/") else ("/" + ex_type)
|
||||
web_request = get_web_content(link, {})
|
||||
|
||||
thread_stopped = session.wait_for_next(Event('stopped', ANY.dict_with({'reason': 'exception'})))
|
||||
assert thread_stopped == Event('stopped', ANY.dict_with({
|
||||
'reason': 'exception',
|
||||
'text': ANY.such_that(lambda s: s.endswith('ArithmeticError')),
|
||||
'description': 'Hello'
|
||||
}))
|
||||
thread_stopped = session.wait_for_next(
|
||||
Event("stopped", some.dict.containing({"reason": "exception"}))
|
||||
)
|
||||
assert thread_stopped == Event(
|
||||
"stopped",
|
||||
some.dict.containing(
|
||||
{
|
||||
"reason": "exception",
|
||||
"text": some.str.such_that(lambda s: s.endswith("ArithmeticError")),
|
||||
"description": "Hello",
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
tid = thread_stopped.body['threadId']
|
||||
tid = thread_stopped.body["threadId"]
|
||||
resp_exception_info = session.send_request(
|
||||
'exceptionInfo',
|
||||
arguments={'threadId': tid, }
|
||||
"exceptionInfo", arguments={"threadId": tid}
|
||||
).wait_for_response()
|
||||
exception = resp_exception_info.body
|
||||
assert exception == {
|
||||
'exceptionId': ANY.such_that(lambda s: s.endswith('ArithmeticError')),
|
||||
'breakMode': 'always',
|
||||
'description': 'Hello',
|
||||
'details': {
|
||||
'message': 'Hello',
|
||||
'typeName': ANY.such_that(lambda s: s.endswith('ArithmeticError')),
|
||||
'source': Path(FLASK1_APP),
|
||||
'stackTrace': ANY.such_that(lambda s: True)
|
||||
}
|
||||
}
|
||||
|
||||
resp_stacktrace = session.send_request('stackTrace', arguments={
|
||||
'threadId': tid,
|
||||
}).wait_for_response()
|
||||
assert resp_stacktrace.body['totalFrames'] > 0
|
||||
frames = resp_stacktrace.body['stackFrames']
|
||||
assert frames[0] == {
|
||||
'id': ANY.dap_id,
|
||||
'name': 'bad_route_' + ex_type,
|
||||
'source': {
|
||||
'sourceReference': ANY.dap_id,
|
||||
'path': Path(FLASK1_APP),
|
||||
"exceptionId": some.str.such_that(lambda s: s.endswith("ArithmeticError")),
|
||||
"breakMode": "always",
|
||||
"description": "Hello",
|
||||
"details": {
|
||||
"message": "Hello",
|
||||
"typeName": some.str.such_that(lambda s: s.endswith("ArithmeticError")),
|
||||
"source": some.path(FLASK1_APP),
|
||||
"stackTrace": some.str.such_that(lambda s: True),
|
||||
},
|
||||
'line': ex_line,
|
||||
'column': 1,
|
||||
}
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
resp_stacktrace = session.send_request(
|
||||
"stackTrace", arguments={"threadId": tid}
|
||||
).wait_for_response()
|
||||
assert resp_stacktrace.body["totalFrames"] > 0
|
||||
frames = resp_stacktrace.body["stackFrames"]
|
||||
assert frames[0] == {
|
||||
"id": some.dap_id,
|
||||
"name": "bad_route_" + ex_type,
|
||||
"source": {"sourceReference": some.dap_id, "path": some.path(FLASK1_APP)},
|
||||
"line": ex_line,
|
||||
"column": 1,
|
||||
}
|
||||
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
# ignore response for exception tests
|
||||
web_request.wait_for_response()
|
||||
|
||||
# shutdown to web server
|
||||
link = base_link + 'exit' if base_link.endswith('/') else '/exit'
|
||||
link = base_link + "exit" if base_link.endswith("/") else "/exit"
|
||||
get_web_content(link).wait_for_response()
|
||||
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.timeout(120)
|
||||
@pytest.mark.parametrize('start_method', ['launch'])
|
||||
@pytest.mark.skipif((sys.version_info < (3, 0)) and (platform.system() != 'Windows'), reason='Bug #935')
|
||||
@pytest.mark.parametrize("start_method", ["launch"])
|
||||
@pytest.mark.skipif(
|
||||
(sys.version_info < (3, 0)) and (platform.system() != "Windows"), reason="Bug #935"
|
||||
)
|
||||
def test_flask_breakpoint_multiproc(start_method):
|
||||
env = {
|
||||
'FLASK_APP': 'app',
|
||||
'FLASK_ENV': 'development',
|
||||
'FLASK_DEBUG': '1',
|
||||
}
|
||||
if platform.system() != 'Windows':
|
||||
locale = 'en_US.utf8' if platform.system() == 'Linux' else 'en_US.UTF-8'
|
||||
env.update({
|
||||
'LC_ALL': locale,
|
||||
'LANG': locale,
|
||||
})
|
||||
env = {"FLASK_APP": "app", "FLASK_ENV": "development", "FLASK_DEBUG": "1"}
|
||||
if platform.system() != "Windows":
|
||||
locale = "en_US.utf8" if platform.system() == "Linux" else "en_US.UTF-8"
|
||||
env.update({"LC_ALL": locale, "LANG": locale})
|
||||
|
||||
with debug.Session() as parent_session:
|
||||
parent_session.initialize(
|
||||
start_method=start_method,
|
||||
target=('module', 'flask'),
|
||||
target=("module", "flask"),
|
||||
multiprocess=True,
|
||||
program_args=['run', '--port', str(FLASK_PORT)],
|
||||
ignore_unobserved=[Event('stopped')],
|
||||
debug_options=['Jinja'],
|
||||
program_args=["run", "--port", str(FLASK_PORT)],
|
||||
ignore_unobserved=[Event("stopped")],
|
||||
debug_options=["Jinja"],
|
||||
cwd=FLASK1_ROOT,
|
||||
env=env,
|
||||
expected_returncode=ANY.int, # No clean way to kill Flask server
|
||||
expected_returncode=some.int, # No clean way to kill Flask server
|
||||
)
|
||||
|
||||
bp_line = 11
|
||||
bp_var_content = 'Flask-Jinja-Test'
|
||||
bp_line = app_py_lines["bphome"]
|
||||
bp_var_content = "Flask-Jinja-Test"
|
||||
parent_session.set_breakpoints(FLASK1_APP, [bp_line])
|
||||
parent_session.start_debugging()
|
||||
|
||||
with parent_session.connect_to_next_child_session() as child_session:
|
||||
child_session.send_request('setBreakpoints', arguments={
|
||||
'source': {'path': FLASK1_APP},
|
||||
'breakpoints': [{'line': bp_line}, ],
|
||||
}).wait_for_response()
|
||||
child_session.send_request(
|
||||
"setBreakpoints",
|
||||
arguments={
|
||||
"source": {"path": FLASK1_APP},
|
||||
"breakpoints": [{"line": bp_line}],
|
||||
},
|
||||
).wait_for_response()
|
||||
child_session.start_debugging()
|
||||
|
||||
# wait for Flask server to start
|
||||
wait_for_connection(FLASK_PORT)
|
||||
web_request = get_web_content(FLASK_LINK, {})
|
||||
|
||||
thread_stopped = child_session.wait_for_next(Event('stopped', ANY.dict_with({'reason': 'breakpoint'})))
|
||||
assert thread_stopped.body['threadId'] is not None
|
||||
thread_stopped = child_session.wait_for_next(
|
||||
Event("stopped", some.dict.containing({"reason": "breakpoint"}))
|
||||
)
|
||||
assert thread_stopped.body["threadId"] is not None
|
||||
|
||||
tid = thread_stopped.body['threadId']
|
||||
tid = thread_stopped.body["threadId"]
|
||||
|
||||
resp_stacktrace = child_session.send_request('stackTrace', arguments={
|
||||
'threadId': tid,
|
||||
}).wait_for_response()
|
||||
assert resp_stacktrace.body['totalFrames'] > 0
|
||||
frames = resp_stacktrace.body['stackFrames']
|
||||
resp_stacktrace = child_session.send_request(
|
||||
"stackTrace", arguments={"threadId": tid}
|
||||
).wait_for_response()
|
||||
assert resp_stacktrace.body["totalFrames"] > 0
|
||||
frames = resp_stacktrace.body["stackFrames"]
|
||||
assert frames[0] == {
|
||||
'id': ANY.dap_id,
|
||||
'name': 'home',
|
||||
'source': {
|
||||
'sourceReference': ANY.dap_id,
|
||||
'path': Path(FLASK1_APP),
|
||||
"id": some.dap_id,
|
||||
"name": "home",
|
||||
"source": {
|
||||
"sourceReference": some.dap_id,
|
||||
"path": some.path(FLASK1_APP),
|
||||
},
|
||||
'line': bp_line,
|
||||
'column': 1,
|
||||
"line": bp_line,
|
||||
"column": 1,
|
||||
}
|
||||
|
||||
fid = frames[0]['id']
|
||||
resp_scopes = child_session.send_request('scopes', arguments={
|
||||
'frameId': fid
|
||||
}).wait_for_response()
|
||||
scopes = resp_scopes.body['scopes']
|
||||
fid = frames[0]["id"]
|
||||
resp_scopes = child_session.send_request(
|
||||
"scopes", arguments={"frameId": fid}
|
||||
).wait_for_response()
|
||||
scopes = resp_scopes.body["scopes"]
|
||||
assert len(scopes) > 0
|
||||
|
||||
resp_variables = child_session.send_request('variables', arguments={
|
||||
'variablesReference': scopes[0]['variablesReference']
|
||||
}).wait_for_response()
|
||||
variables = [v for v in resp_variables.body['variables'] if v['name'] == 'content']
|
||||
assert variables == [{
|
||||
'name': 'content',
|
||||
'type': 'str',
|
||||
'value': repr(bp_var_content),
|
||||
'presentationHint': {'attributes': ['rawString']},
|
||||
'evaluateName': 'content',
|
||||
'variablesReference': 0,
|
||||
}]
|
||||
resp_variables = child_session.send_request(
|
||||
"variables",
|
||||
arguments={"variablesReference": scopes[0]["variablesReference"]},
|
||||
).wait_for_response()
|
||||
variables = [
|
||||
v for v in resp_variables.body["variables"] if v["name"] == "content"
|
||||
]
|
||||
assert variables == [
|
||||
{
|
||||
"name": "content",
|
||||
"type": "str",
|
||||
"value": repr(bp_var_content),
|
||||
"presentationHint": {"attributes": ["rawString"]},
|
||||
"evaluateName": "content",
|
||||
"variablesReference": 0,
|
||||
}
|
||||
]
|
||||
|
||||
child_session.send_request('continue').wait_for_response(freeze=False)
|
||||
child_session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
web_content = web_request.wait_for_response()
|
||||
assert web_content.find(bp_var_content) != -1
|
||||
|
||||
# shutdown to web server
|
||||
link = FLASK_LINK + 'exit'
|
||||
link = FLASK_LINK + "exit"
|
||||
get_web_content(link).wait_for_response()
|
||||
|
||||
child_session.wait_for_termination()
|
||||
|
|
|
|||
|
|
@@ -1,37 +0,0 @@
|
|||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import pytest
|
||||
import ptvsd
|
||||
|
||||
from ptvsd.server.wrapper import InternalsFilter
|
||||
|
||||
|
||||
INTERNAL_DIR = os.path.dirname(os.path.abspath(ptvsd.__file__))
|
||||
|
||||
|
||||
@pytest.mark.parametrize('path', [
|
||||
os.path.abspath(ptvsd.__file__),
|
||||
# File used by VS/VSC to launch ptvsd
|
||||
os.path.join('somepath', 'ptvsd_launcher.py'),
|
||||
# Any file under ptvsd
|
||||
os.path.join(INTERNAL_DIR, 'somefile.py'),
|
||||
])
|
||||
|
||||
|
||||
def test_internal_paths(path):
|
||||
int_filter = InternalsFilter()
|
||||
assert int_filter.is_internal_path(path)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('path', [
|
||||
__file__,
|
||||
os.path.join('somepath', 'somefile.py'),
|
||||
])
|
||||
def test_user_file_paths(path):
|
||||
int_filter = InternalsFilter()
|
||||
assert not int_filter.is_internal_path(path)
|
||||
|
|
@@ -10,51 +10,54 @@ from tests import debug
|
|||
from tests.patterns import some
|
||||
|
||||
|
||||
@pytest.mark.parametrize('jmc', ['jmcOn', 'jmcOff'])
|
||||
@pytest.mark.parametrize("jmc", ["jmcOn", "jmcOff"])
|
||||
def test_justmycode_frames(pyfile, start_method, run_as, jmc):
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
print('break here') #@bp
|
||||
import debug_me # noqa
|
||||
|
||||
print("break here") # @bp
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
debug_options=[] if jmc == 'jmcOn' else ['DebugStdLib']
|
||||
debug_options=[] if jmc == "jmcOn" else ["DebugStdLib"],
|
||||
)
|
||||
|
||||
bp_line = line_numbers['bp']
|
||||
bp_line = code_to_debug.lines["bp"]
|
||||
|
||||
actual_bps = session.set_breakpoints(code_to_debug, [bp_line])
|
||||
actual_bps = [bp['line'] for bp in actual_bps]
|
||||
actual_bps = [bp["line"] for bp in actual_bps]
|
||||
session.start_debugging()
|
||||
|
||||
hit = session.wait_for_thread_stopped()
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert frames[0] == ANY.dict_with({
|
||||
'line': bp_line,
|
||||
'source': ANY.dict_with({
|
||||
'path': Path(code_to_debug)
|
||||
})
|
||||
})
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert frames[0] == some.dict.containing(
|
||||
{
|
||||
"line": bp_line,
|
||||
"source": some.dict.containing({"path": some.path(code_to_debug)}),
|
||||
}
|
||||
)
|
||||
|
||||
if jmc == 'jmcOn':
|
||||
if jmc == "jmcOn":
|
||||
assert len(frames) == 1
|
||||
session.send_request('stepIn', {'threadId': hit.thread_id}).wait_for_response()
|
||||
session.send_request(
|
||||
"stepIn", {"threadId": hit.thread_id}
|
||||
).wait_for_response()
|
||||
# 'step' should terminate the debuggee
|
||||
else:
|
||||
assert len(frames) >= 1
|
||||
session.send_request('stepIn', {'threadId': hit.thread_id}).wait_for_response()
|
||||
session.send_request(
|
||||
"stepIn", {"threadId": hit.thread_id}
|
||||
).wait_for_response()
|
||||
|
||||
# 'step' should enter stdlib
|
||||
hit2 = session.wait_for_thread_stopped()
|
||||
frames2 = hit2.stacktrace.body['stackFrames']
|
||||
assert frames2[0]['source']['path'] != Path(code_to_debug)
|
||||
frames2 = hit2.stacktrace.body["stackFrames"]
|
||||
assert frames2[0]["source"]["path"] != some.path(code_to_debug)
|
||||
|
||||
# 'continue' should terminate the debuggee
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
session.wait_for_exit()
|
||||
|
|
|
|||
|
|
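The hunk above replaces hard-coded breakpoint line numbers with # @name markers that are read back through code_to_debug.lines["bp"]. A minimal sketch of collecting such markers, assuming a simple end-of-line comment convention rather than the harness's actual helper, could look like this:

import re

_MARKER = re.compile(r"#\s*@(\w+)\s*$")

def collect_markers(path):
    # Map marker names (e.g. "bp") to 1-based line numbers so tests never
    # hard-code positions that shift whenever the file is edited.
    markers = {}
    with open(path) as f:
        for lineno, line in enumerate(f, start=1):
            match = _MARKER.search(line.rstrip("\n"))
            if match:
                markers[match.group(1)] = lineno
    return markers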
@@ -12,30 +12,31 @@ from tests import debug
|
|||
|
||||
@contextlib.contextmanager
|
||||
def check_logs(tmpdir, session):
|
||||
assert not tmpdir.listdir('ptvsd-*.log')
|
||||
assert not tmpdir.listdir("ptvsd-*.log")
|
||||
yield
|
||||
assert len(tmpdir.listdir('ptvsd-*.log')) == 1
|
||||
log_name = 'ptvsd-{}.log'.format(session.pid)
|
||||
assert len(tmpdir.listdir("ptvsd-*.log")) == 1
|
||||
log_name = "ptvsd-{}.log".format(session.pid)
|
||||
assert tmpdir.join(log_name).size() > 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize('cli', ['arg', 'env'])
|
||||
@pytest.mark.parametrize("cli", ["arg", "env"])
|
||||
def test_log_cli(pyfile, tmpdir, start_method, run_as, cli):
|
||||
if cli == 'arg' and start_method == 'attach_socket_import':
|
||||
if cli == "arg" and start_method == "attach_socket_import":
|
||||
pytest.skip()
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
|
||||
with debug.Session() as session:
|
||||
with check_logs(tmpdir, session):
|
||||
if cli == 'arg':
|
||||
if cli == "arg":
|
||||
session.log_dir = str(tmpdir)
|
||||
else:
|
||||
session.env['PTVSD_LOG_DIR'] = str(tmpdir)
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
session.env["PTVSD_LOG_DIR"] = str(tmpdir)
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug), start_method=start_method
|
||||
)
|
||||
session.start_debugging()
|
||||
session.wait_for_exit()
|
||||
|
||||
|
|
@@ -43,13 +44,16 @@ def test_log_cli(pyfile, tmpdir, start_method, run_as, cli):
|
|||
def test_log_api(pyfile, tmpdir, run_as):
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
import sys
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger(log_dir=str(sys.argv[1]))
|
||||
# import sys
|
||||
import debug_me # noqa
|
||||
|
||||
# import_and_enable_debugger(log_dir=str(sys.argv[1]))
|
||||
|
||||
with debug.Session() as session:
|
||||
with check_logs(tmpdir, session):
|
||||
session.program_args += [str(tmpdir)]
|
||||
session.initialize(target=(run_as, code_to_debug), start_method='attach_socket_import')
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug), start_method="attach_socket_import"
|
||||
)
|
||||
session.start_debugging()
|
||||
session.wait_for_exit()
|
||||
|
|
|
|||
|
|
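The multiprocessing tests in the next hunks coordinate with the debuggee over a backchannel, exchanging values via write_json and read_json. Purely as an illustration, assuming a newline-delimited JSON protocol rather than the harness's actual backchannel module, such a channel could be sketched as:

import json

class JsonChannel(object):
    # Hypothetical one-JSON-value-per-line channel over a connected socket.
    def __init__(self, sock):
        self._file = sock.makefile("rwb")

    def write_json(self, value):
        self._file.write(json.dumps(value).encode("utf-8") + b"\n")
        self._file.flush()

    def read_json(self):
        line = self._file.readline()
        if not line:
            raise EOFError("backchannel closed")
        return json.loads(line.decode("utf-8"))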
@@ -14,125 +14,145 @@ from tests.timeline import Event, Request
|
|||
|
||||
|
||||
@pytest.mark.timeout(30)
|
||||
@pytest.mark.skipif(platform.system() != 'Windows',
|
||||
reason='Debugging multiprocessing module only works on Windows')
|
||||
@pytest.mark.parametrize('start_method', ['launch', 'attach_socket_cmdline'])
|
||||
@pytest.mark.skipif(
|
||||
platform.system() != "Windows",
|
||||
reason="Debugging multiprocessing module only works on Windows",
|
||||
)
|
||||
@pytest.mark.parametrize("start_method", ["launch", "attach_socket_cmdline"])
|
||||
def test_multiprocessing(pyfile, start_method, run_as):
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
import multiprocessing
|
||||
import platform
|
||||
import sys
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
|
||||
def child_of_child(q):
|
||||
print('entering child of child')
|
||||
print("entering child of child")
|
||||
assert q.get() == 2
|
||||
q.put(3)
|
||||
print('leaving child of child')
|
||||
print("leaving child of child")
|
||||
|
||||
def child(q):
|
||||
print('entering child')
|
||||
print("entering child")
|
||||
assert q.get() == 1
|
||||
|
||||
print('spawning child of child')
|
||||
print("spawning child of child")
|
||||
p = multiprocessing.Process(target=child_of_child, args=(q,))
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
assert q.get() == 3
|
||||
q.put(4)
|
||||
print('leaving child')
|
||||
print("leaving child")
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
import backchannel
|
||||
if sys.version_info >= (3, 4):
|
||||
multiprocessing.set_start_method('spawn')
|
||||
else:
|
||||
assert platform.system() == 'Windows'
|
||||
|
||||
print('spawning child')
|
||||
if sys.version_info >= (3, 4):
|
||||
multiprocessing.set_start_method("spawn")
|
||||
else:
|
||||
assert platform.system() == "Windows"
|
||||
|
||||
print("spawning child")
|
||||
q = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(target=child, args=(q,))
|
||||
p.start()
|
||||
print('child spawned')
|
||||
print("child spawned")
|
||||
backchannel.write_json(p.pid)
|
||||
|
||||
q.put(1)
|
||||
assert backchannel.read_json() == 'continue'
|
||||
assert backchannel.read_json() == "continue"
|
||||
q.put(2)
|
||||
p.join()
|
||||
assert q.get() == 4
|
||||
q.close()
|
||||
backchannel.write_json('done')
|
||||
backchannel.write_json("done")
|
||||
|
||||
with debug.Session() as parent_session:
|
||||
parent_session.initialize(multiprocess=True, target=(run_as, code_to_debug), start_method=start_method, use_backchannel=True)
|
||||
parent_session.initialize(
|
||||
multiprocess=True,
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
use_backchannel=True,
|
||||
)
|
||||
parent_session.start_debugging()
|
||||
|
||||
root_start_request, = parent_session.all_occurrences_of(Request('launch') | Request('attach'))
|
||||
root_process, = parent_session.all_occurrences_of(Event('process'))
|
||||
root_pid = int(root_process.body['systemProcessId'])
|
||||
root_start_request, = parent_session.all_occurrences_of(
|
||||
Request("launch") | Request("attach")
|
||||
)
|
||||
root_process, = parent_session.all_occurrences_of(Event("process"))
|
||||
root_pid = int(root_process.body["systemProcessId"])
|
||||
|
||||
child_pid = parent_session.read_json()
|
||||
|
||||
child_subprocess = parent_session.wait_for_next(Event('ptvsd_subprocess'))
|
||||
assert child_subprocess == Event('ptvsd_subprocess', {
|
||||
'rootProcessId': root_pid,
|
||||
'parentProcessId': root_pid,
|
||||
'processId': child_pid,
|
||||
'port': ANY.int,
|
||||
'rootStartRequest': {
|
||||
'seq': ANY.int,
|
||||
'type': 'request',
|
||||
'command': root_start_request.command,
|
||||
'arguments': root_start_request.arguments,
|
||||
}
|
||||
})
|
||||
child_subprocess = parent_session.wait_for_next(Event("ptvsd_subprocess"))
|
||||
assert child_subprocess == Event(
|
||||
"ptvsd_subprocess",
|
||||
{
|
||||
"rootProcessId": root_pid,
|
||||
"parentProcessId": root_pid,
|
||||
"processId": child_pid,
|
||||
"port": some.int,
|
||||
"rootStartRequest": {
|
||||
"seq": some.int,
|
||||
"type": "request",
|
||||
"command": root_start_request.command,
|
||||
"arguments": root_start_request.arguments,
|
||||
},
|
||||
},
|
||||
)
|
||||
parent_session.proceed()
|
||||
|
||||
with parent_session.connect_to_child_session(child_subprocess) as child_session:
|
||||
child_session.start_debugging()
|
||||
|
||||
grandchild_subprocess = parent_session.wait_for_next(Event('ptvsd_subprocess'))
|
||||
assert grandchild_subprocess == Event('ptvsd_subprocess', {
|
||||
'rootProcessId': root_pid,
|
||||
'parentProcessId': child_pid,
|
||||
'processId': ANY.int,
|
||||
'port': ANY.int,
|
||||
'rootStartRequest': {
|
||||
'seq': ANY.int,
|
||||
'type': 'request',
|
||||
'command': root_start_request.command,
|
||||
'arguments': root_start_request.arguments,
|
||||
}
|
||||
})
|
||||
grandchild_subprocess = parent_session.wait_for_next(
|
||||
Event("ptvsd_subprocess")
|
||||
)
|
||||
assert grandchild_subprocess == Event(
|
||||
"ptvsd_subprocess",
|
||||
{
|
||||
"rootProcessId": root_pid,
|
||||
"parentProcessId": child_pid,
|
||||
"processId": some.int,
|
||||
"port": some.int,
|
||||
"rootStartRequest": {
|
||||
"seq": some.int,
|
||||
"type": "request",
|
||||
"command": root_start_request.command,
|
||||
"arguments": root_start_request.arguments,
|
||||
},
|
||||
},
|
||||
)
|
||||
parent_session.proceed()
|
||||
|
||||
with parent_session.connect_to_child_session(grandchild_subprocess) as grandchild_session:
|
||||
with parent_session.connect_to_child_session(
|
||||
grandchild_subprocess
|
||||
) as grandchild_session:
|
||||
grandchild_session.start_debugging()
|
||||
|
||||
parent_session.write_json('continue')
|
||||
parent_session.write_json("continue")
|
||||
|
||||
grandchild_session.wait_for_termination()
|
||||
child_session.wait_for_termination()
|
||||
|
||||
assert parent_session.read_json() == 'done'
|
||||
assert parent_session.read_json() == "done"
|
||||
parent_session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.timeout(30)
|
||||
@pytest.mark.skipif(sys.version_info < (3, 0) and (platform.system() != 'Windows'),
|
||||
reason='Bug #935')
|
||||
@pytest.mark.parametrize('start_method', ['launch', 'attach_socket_cmdline'])
|
||||
@pytest.mark.skipif(
|
||||
sys.version_info < (3, 0) and (platform.system() != "Windows"), reason="Bug #935"
|
||||
)
|
||||
@pytest.mark.parametrize("start_method", ["launch", "attach_socket_cmdline"])
|
||||
def test_subprocess(pyfile, start_method, run_as):
|
||||
@pyfile
|
||||
def child():
|
||||
import sys
|
||||
import backchannel
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
|
||||
backchannel.write_json(sys.argv)
|
||||
|
||||
@pyfile
|
||||
|
|
@@ -140,56 +160,73 @@ def test_subprocess(pyfile, start_method, run_as):
|
|||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
argv = [sys.executable, sys.argv[1], '--arg1', '--arg2', '--arg3']
|
||||
import debug_me # noqa
|
||||
|
||||
argv = [sys.executable, sys.argv[1], "--arg1", "--arg2", "--arg3"]
|
||||
env = os.environ.copy()
|
||||
process = subprocess.Popen(argv, env=env, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
process = subprocess.Popen(
|
||||
argv,
|
||||
env=env,
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
)
|
||||
process.wait()
|
||||
|
||||
with debug.Session() as parent_session:
|
||||
parent_session.program_args += [child]
|
||||
parent_session.initialize(multiprocess=True, target=(run_as, parent), start_method=start_method, use_backchannel=True)
|
||||
parent_session.initialize(
|
||||
multiprocess=True,
|
||||
target=(run_as, parent),
|
||||
start_method=start_method,
|
||||
use_backchannel=True,
|
||||
)
|
||||
parent_session.start_debugging()
|
||||
|
||||
root_start_request, = parent_session.all_occurrences_of(Request('launch') | Request('attach'))
|
||||
root_process, = parent_session.all_occurrences_of(Event('process'))
|
||||
root_pid = int(root_process.body['systemProcessId'])
|
||||
root_start_request, = parent_session.all_occurrences_of(
|
||||
Request("launch") | Request("attach")
|
||||
)
|
||||
root_process, = parent_session.all_occurrences_of(Event("process"))
|
||||
root_pid = int(root_process.body["systemProcessId"])
|
||||
|
||||
child_subprocess = parent_session.wait_for_next(Event('ptvsd_subprocess'))
|
||||
assert child_subprocess == Event('ptvsd_subprocess', {
|
||||
'rootProcessId': root_pid,
|
||||
'parentProcessId': root_pid,
|
||||
'processId': ANY.int,
|
||||
'port': ANY.int,
|
||||
'rootStartRequest': {
|
||||
'seq': ANY.int,
|
||||
'type': 'request',
|
||||
'command': root_start_request.command,
|
||||
'arguments': root_start_request.arguments,
|
||||
}
|
||||
})
|
||||
child_subprocess = parent_session.wait_for_next(Event("ptvsd_subprocess"))
|
||||
assert child_subprocess == Event(
|
||||
"ptvsd_subprocess",
|
||||
{
|
||||
"rootProcessId": root_pid,
|
||||
"parentProcessId": root_pid,
|
||||
"processId": some.int,
|
||||
"port": some.int,
|
||||
"rootStartRequest": {
|
||||
"seq": some.int,
|
||||
"type": "request",
|
||||
"command": root_start_request.command,
|
||||
"arguments": root_start_request.arguments,
|
||||
},
|
||||
},
|
||||
)
|
||||
parent_session.proceed()
|
||||
|
||||
with parent_session.connect_to_child_session(child_subprocess) as child_session:
|
||||
child_session.start_debugging()
|
||||
|
||||
child_argv = parent_session.read_json()
|
||||
assert child_argv == [child, '--arg1', '--arg2', '--arg3']
|
||||
assert child_argv == [child, "--arg1", "--arg2", "--arg3"]
|
||||
|
||||
child_session.wait_for_termination()
|
||||
parent_session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.timeout(30)
|
||||
@pytest.mark.skipif(sys.version_info < (3, 0) and (platform.system() != 'Windows'),
|
||||
reason='Bug #935')
|
||||
@pytest.mark.parametrize('start_method', ['launch', 'attach_socket_cmdline'])
|
||||
@pytest.mark.skipif(
|
||||
sys.version_info < (3, 0) and (platform.system() != "Windows"), reason="Bug #935"
|
||||
)
|
||||
@pytest.mark.parametrize("start_method", ["launch", "attach_socket_cmdline"])
|
||||
def test_autokill(pyfile, start_method, run_as):
|
||||
@pyfile
|
||||
def child():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
|
||||
while True:
|
||||
pass
|
||||
|
||||
|
|
@@ -199,25 +236,36 @@ def test_autokill(pyfile, start_method, run_as):
|
|||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
|
||||
argv = [sys.executable, sys.argv[1]]
|
||||
env = os.environ.copy()
|
||||
subprocess.Popen(argv, env=env, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
subprocess.Popen(
|
||||
argv,
|
||||
env=env,
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
)
|
||||
backchannel.read_json()
|
||||
|
||||
with debug.Session() as parent_session:
|
||||
parent_session.program_args += [child]
|
||||
parent_session.initialize(multiprocess=True, target=(run_as, parent), start_method=start_method, use_backchannel=True)
|
||||
parent_session.initialize(
|
||||
multiprocess=True,
|
||||
target=(run_as, parent),
|
||||
start_method=start_method,
|
||||
use_backchannel=True,
|
||||
)
|
||||
parent_session.start_debugging()
|
||||
|
||||
with parent_session.connect_to_next_child_session() as child_session:
|
||||
child_session.start_debugging()
|
||||
|
||||
if parent_session.start_method == 'launch':
|
||||
if parent_session.start_method == "launch":
|
||||
# In launch scenario, terminate the parent process by disconnecting from it.
|
||||
parent_session.expected_returncode = ANY
|
||||
parent_session.send_request('disconnect')
|
||||
parent_session.expected_returncode = some.int
|
||||
parent_session.send_request("disconnect")
|
||||
parent_session.wait_for_disconnect()
|
||||
else:
|
||||
# In attach scenario, just let the parent process run to completion.
|
||||
|
|
@@ -228,44 +276,46 @@ def test_autokill(pyfile, start_method, run_as):
|
|||
parent_session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.version_info < (3, 0) and (platform.system() != 'Windows'),
|
||||
reason='Bug #935')
|
||||
@pytest.mark.skipif(
|
||||
sys.version_info < (3, 0) and (platform.system() != "Windows"), reason="Bug #935"
|
||||
)
|
||||
def test_argv_quoting(pyfile, start_method, run_as):
|
||||
@pyfile
|
||||
def args():
|
||||
# import_and_enable_debugger
|
||||
args = [ # noqa
|
||||
r'regular',
|
||||
r'',
|
||||
r'with spaces'
|
||||
r'"quoted"',
|
||||
import debug_me # noqa
|
||||
|
||||
args = [ # noqa
|
||||
r"regular",
|
||||
r"",
|
||||
r"with spaces" r'"quoted"',
|
||||
r'" quote at start',
|
||||
r'quote at end "',
|
||||
r'quote in " the middle',
|
||||
r'quotes "in the" middle',
|
||||
r'\path with\spaces',
|
||||
r'\path\with\terminal\backslash' + '\\',
|
||||
r'backslash \" before quote',
|
||||
r"\path with\spaces",
|
||||
r"\path\with\terminal\backslash" + "\\",
|
||||
r"backslash \" before quote",
|
||||
]
|
||||
|
||||
@pyfile
|
||||
def parent():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
|
||||
import sys
|
||||
import subprocess
|
||||
from args import args
|
||||
|
||||
child = sys.argv[1]
|
||||
subprocess.check_call([sys.executable] + [child] + args)
|
||||
|
||||
@pyfile
|
||||
def child():
|
||||
# import_and_enable_debugger
|
||||
import debug_me # noqa
|
||||
import backchannel
|
||||
import sys
|
||||
|
||||
from args import args as expected_args
|
||||
|
||||
backchannel.write_json(expected_args)
|
||||
|
||||
actual_args = sys.argv[1:]
|
||||
|
|
|
|||
|
|
@@ -12,82 +12,77 @@ from tests.timeline import Event
|
|||
|
||||
|
||||
def test_with_no_output(pyfile, start_method, run_as):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
|
||||
# Do nothing, and check if there is any output
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
session.start_debugging()
|
||||
session.wait_for_exit()
|
||||
assert b'' == session.get_stdout_as_string()
|
||||
assert b'' == session.get_stderr_as_string()
|
||||
assert b"" == session.get_stdout_as_string()
|
||||
assert b"" == session.get_stderr_as_string()
|
||||
|
||||
|
||||
def test_with_tab_in_output(pyfile, start_method, run_as):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
a = '\t'.join(('Hello', 'World'))
|
||||
import debug_me # noqa
|
||||
|
||||
a = "\t".join(("Hello", "World"))
|
||||
print(a)
|
||||
# Break here so we are sure to get the output event.
|
||||
a = 1 # @bp1
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
|
||||
session.set_breakpoints(code_to_debug, [line_numbers['bp1']])
|
||||
session.set_breakpoints(code_to_debug, [code_to_debug.lines["bp1"]])
|
||||
session.start_debugging()
|
||||
|
||||
# Breakpoint at the end just to make sure we get all output events.
|
||||
session.wait_for_thread_stopped()
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
||||
output = session.all_occurrences_of(Event('output', ANY.dict_with({'category': 'stdout'})))
|
||||
output_str = ''.join(o.body['output'] for o in output)
|
||||
assert output_str.startswith('Hello\tWorld')
|
||||
output = session.all_occurrences_of(
|
||||
Event("output", some.dict.containing({"category": "stdout"}))
|
||||
)
|
||||
output_str = "".join(o.body["output"] for o in output)
|
||||
assert output_str.startswith("Hello\tWorld")
|
||||
|
||||
|
||||
@pytest.mark.parametrize('redirect', ['RedirectOutput', ''])
|
||||
@pytest.mark.parametrize("redirect", ["RedirectOutput", ""])
|
||||
def test_redirect_output(pyfile, start_method, run_as, redirect):
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
|
||||
for i in [111, 222, 333, 444]:
|
||||
print(i)
|
||||
|
||||
print() # @bp1
|
||||
print() # @bp1
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
with debug.Session() as session:
|
||||
# By default 'RedirectOutput' is always set. So using this way
|
||||
# to override the default in session.
|
||||
session.debug_options = [redirect] if bool(redirect) else []
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
|
||||
session.set_breakpoints(code_to_debug, [line_numbers['bp1']])
|
||||
session.set_breakpoints(code_to_debug, [code_to_debug.lines["bp1"]])
|
||||
session.start_debugging()
|
||||
|
||||
# Breakpoint at the end just to make sure we get all output events.
|
||||
session.wait_for_thread_stopped()
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
||||
output = session.all_occurrences_of(Event('output', ANY.dict_with({'category': 'stdout'})))
|
||||
expected = ['111', '222', '333', '444'] if bool(redirect) else []
|
||||
assert expected == list(o.body['output'] for o in output if len(o.body['output']) == 3)
|
||||
output = session.all_occurrences_of(
|
||||
Event("output", some.dict.containing({"category": "stdout"}))
|
||||
)
|
||||
expected = ["111", "222", "333", "444"] if bool(redirect) else []
|
||||
assert expected == list(
|
||||
o.body["output"] for o in output if len(o.body["output"]) == 3
|
||||
)
|
||||
|
|
|
|||
|
|
@@ -11,42 +11,55 @@ from ptvsd.server import options, __main__
|
|||
from tests.patterns import some
|
||||
|
||||
|
||||
EXPECTED_EXTRA = ['--']
|
||||
EXPECTED_EXTRA = ["--"]
|
||||
|
||||
|
||||
@pytest.mark.parametrize('target_kind', ['file', 'module', 'code'])
|
||||
@pytest.mark.parametrize('client', ['', 'client'])
|
||||
@pytest.mark.parametrize('wait', ['', 'wait'])
|
||||
@pytest.mark.parametrize('nodebug', ['', 'nodebug'])
|
||||
@pytest.mark.parametrize('multiproc', ['', 'multiproc'])
|
||||
@pytest.mark.parametrize('extra', ['', 'extra'])
|
||||
@pytest.mark.parametrize("target_kind", ["file", "module", "code"])
|
||||
@pytest.mark.parametrize("client", ["", "client"])
|
||||
@pytest.mark.parametrize("wait", ["", "wait"])
|
||||
@pytest.mark.parametrize("nodebug", ["", "nodebug"])
|
||||
@pytest.mark.parametrize("multiproc", ["", "multiproc"])
|
||||
@pytest.mark.parametrize("extra", ["", "extra"])
|
||||
def test_targets(target_kind, client, wait, nodebug, multiproc, extra):
|
||||
args = ['--host', 'localhost', '--port', '8888']
|
||||
args = ["--host", "localhost", "--port", "8888"]
|
||||
|
||||
if client:
|
||||
args += ['--client']
|
||||
args += ["--client"]
|
||||
|
||||
if wait:
|
||||
args += ['--wait']
|
||||
args += ["--wait"]
|
||||
|
||||
if nodebug:
|
||||
args += ['--nodebug']
|
||||
args += ["--nodebug"]
|
||||
|
||||
if multiproc:
|
||||
args += ['--multiprocess']
|
||||
args += ["--multiprocess"]
|
||||
|
||||
if target_kind == 'file':
|
||||
target = 'spam.py'
|
||||
if target_kind == "file":
|
||||
target = "spam.py"
|
||||
args += [target]
|
||||
elif target_kind == 'module':
|
||||
target = 'spam'
|
||||
args += ['-m', target]
|
||||
elif target_kind == 'code':
|
||||
target = '123'
|
||||
args += ['-c', target]
|
||||
elif target_kind == "module":
|
||||
target = "spam"
|
||||
args += ["-m", target]
|
||||
elif target_kind == "code":
|
||||
target = "123"
|
||||
args += ["-c", target]
|
||||
|
||||
if extra:
|
||||
extra = ['ham', '--client', '--wait', '-y', 'spam', '--', '--nodebug', '--host', '--port', '-c', '--something', '-m']
|
||||
extra = [
|
||||
"ham",
|
||||
"--client",
|
||||
"--wait",
|
||||
"-y",
|
||||
"spam",
|
||||
"--",
|
||||
"--nodebug",
|
||||
"--host",
|
||||
"--port",
|
||||
"-c",
|
||||
"--something",
|
||||
"-m",
|
||||
]
|
||||
args += extra
|
||||
else:
|
||||
extra = []
|
||||
|
|
@@ -55,43 +68,38 @@ def test_targets(target_kind, client, wait, nodebug, multiproc, extra):
|
|||
reload(options)
|
||||
rest = __main__.parse(args)
|
||||
assert list(rest) == extra
|
||||
assert vars(options) == ANY.dict_with({
|
||||
'target_kind': target_kind,
|
||||
'target': target,
|
||||
'host': 'localhost',
|
||||
'port': 8888,
|
||||
'no_debug': bool(nodebug),
|
||||
'wait': bool(wait),
|
||||
'multiprocess': bool(multiproc),
|
||||
})
|
||||
assert vars(options) == some.dict.containing(
|
||||
{
|
||||
"target_kind": target_kind,
|
||||
"target": target,
|
||||
"host": "localhost",
|
||||
"port": 8888,
|
||||
"no_debug": bool(nodebug),
|
||||
"wait": bool(wait),
|
||||
"multiprocess": bool(multiproc),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def test_unsupported_arg():
|
||||
reload(options)
|
||||
with pytest.raises(Exception):
|
||||
__main__.parse([
|
||||
'--port', '8888',
|
||||
'--xyz', '123',
|
||||
'spam.py',
|
||||
])
|
||||
__main__.parse(["--port", "8888", "--xyz", "123", "spam.py"])
|
||||
|
||||
|
||||
def test_host_required():
|
||||
reload(options)
|
||||
with pytest.raises(Exception):
|
||||
__main__.parse([
|
||||
'--port', '8888',
|
||||
'-m', 'spam',
|
||||
])
|
||||
__main__.parse(["--port", "8888", "-m", "spam"])
|
||||
|
||||
|
||||
def test_host_empty():
|
||||
reload(options)
|
||||
__main__.parse(['--host', '', '--port', '8888', 'spam.py'])
|
||||
assert options.host == ''
|
||||
__main__.parse(["--host", "", "--port", "8888", "spam.py"])
|
||||
assert options.host == ""
|
||||
|
||||
|
||||
def test_port_default():
|
||||
reload(options)
|
||||
__main__.parse(['--host', 'localhost', 'spam.py'])
|
||||
__main__.parse(["--host", "localhost", "spam.py"])
|
||||
assert options.port == 5678
|
||||
|
|
|
|||
|
|
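The path-mapping tests in the next file compare client-side and server-side paths, which is why the assertions above use some.path(...) instead of raw string equality. A rough sketch of such a tolerant matcher, assuming it only needs to normalize separators and ignore case on Windows (the real pattern may do more), is:

import os
import sys

class PathPattern(object):
    # Hypothetical some.path-style matcher for filesystem paths.
    def __init__(self, expected):
        self.expected = self._normalize(expected)

    @staticmethod
    def _normalize(path):
        path = os.path.normpath(str(path)).replace("\\", "/")
        # Windows filesystems are case-insensitive; Linux and macOS are not.
        return path.lower() if sys.platform == "win32" else path

    def __eq__(self, other):
        return self._normalize(other) == self.expected

    def __ne__(self, other):
        return not self.__eq__(other)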
@@ -10,67 +10,71 @@ import shutil
|
|||
import sys
|
||||
import traceback
|
||||
|
||||
from tests import debug
|
||||
from tests import debug, test_data
|
||||
from tests.patterns import some
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == 'win32', reason='Linux/Mac only test.')
|
||||
@pytest.mark.parametrize('invalid_os_type', [True])
|
||||
def test_client_ide_from_path_mapping_linux_backend(pyfile, tmpdir, start_method, run_as, invalid_os_type):
|
||||
'''
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="Linux/Mac only test.")
|
||||
@pytest.mark.parametrize("invalid_os_type", [True])
|
||||
def test_client_ide_from_path_mapping_linux_backend(
|
||||
pyfile, tmpdir, start_method, run_as, invalid_os_type
|
||||
):
|
||||
"""
|
||||
Test simulating that the backend is on Linux and the client is on Windows
|
||||
(automatically detect it from the path mapping).
|
||||
'''
|
||||
"""
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import backchannel
|
||||
from debug_me import backchannel
|
||||
import pydevd_file_utils
|
||||
backchannel.write_json({'ide_os': pydevd_file_utils._ide_os})
|
||||
print('done') # @break_here
|
||||
|
||||
backchannel.write_json({"ide_os": pydevd_file_utils._ide_os})
|
||||
print("done") # @break_here
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
use_backchannel=True,
|
||||
path_mappings=[{
|
||||
'localRoot': 'C:\\TEMP\\src',
|
||||
'remoteRoot': os.path.dirname(code_to_debug),
|
||||
}],
|
||||
path_mappings=[
|
||||
{
|
||||
"localRoot": "C:\\TEMP\\src",
|
||||
"remoteRoot": os.path.dirname(code_to_debug),
|
||||
}
|
||||
],
|
||||
)
|
||||
if invalid_os_type:
|
||||
session.debug_options.append('CLIENT_OS_TYPE=INVALID')
|
||||
bp_line = get_marked_line_numbers(code_to_debug)['break_here']
|
||||
session.set_breakpoints('c:\\temp\\src\\' + os.path.basename(code_to_debug), [bp_line])
|
||||
session.debug_options.append("CLIENT_OS_TYPE=INVALID")
|
||||
session.set_breakpoints(
|
||||
"c:\\temp\\src\\" + os.path.basename(code_to_debug),
|
||||
[code_to_debug.lines["break_here"]],
|
||||
)
|
||||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped('breakpoint')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert frames[0]['source']['path'] == 'C:\\TEMP\\src\\' + os.path.basename(code_to_debug)
|
||||
hit = session.wait_for_thread_stopped("breakpoint")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert frames[0]["source"]["path"] == "C:\\TEMP\\src\\" + os.path.basename(
|
||||
code_to_debug
|
||||
)
|
||||
|
||||
json_read = session.read_json()
|
||||
assert json_read == {'ide_os': 'WINDOWS'}
|
||||
assert json_read == {"ide_os": "WINDOWS"}
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
def test_with_dot_remote_root(pyfile, tmpdir, start_method, run_as):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
from debug_me import backchannel
|
||||
import os
|
||||
import backchannel
|
||||
backchannel.write_json(os.path.abspath(__file__))
|
||||
print('done')
|
||||
|
||||
bp_line = 6
|
||||
path_local = tmpdir.mkdir('local').join('code_to_debug.py').strpath
|
||||
path_remote = tmpdir.mkdir('remote').join('code_to_debug.py').strpath
|
||||
backchannel.write_json(os.path.abspath(__file__))
|
||||
print("done") # @bp
|
||||
|
||||
path_local = tmpdir.mkdir("local").join("code_to_debug.py").strpath
|
||||
path_remote = tmpdir.mkdir("remote").join("code_to_debug.py").strpath
|
||||
|
||||
dir_local = os.path.dirname(path_local)
|
||||
dir_remote = os.path.dirname(path_remote)
|
||||
|
|
@ -83,52 +87,46 @@ def test_with_dot_remote_root(pyfile, tmpdir, start_method, run_as):
|
|||
target=(run_as, path_remote),
|
||||
start_method=start_method,
|
||||
use_backchannel=True,
|
||||
path_mappings=[{
|
||||
'localRoot': dir_local,
|
||||
'remoteRoot': '.',
|
||||
}],
|
||||
path_mappings=[{"localRoot": dir_local, "remoteRoot": "."}],
|
||||
cwd=dir_remote,
|
||||
)
|
||||
session.set_breakpoints(path_remote, [bp_line])
|
||||
session.set_breakpoints(path_remote, [code_to_debug["bp"]])
|
||||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped('breakpoint')
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
print('Local Path: ' + path_local)
|
||||
print('Frames: ' + str(frames))
|
||||
assert frames[0]['source']['path'] == Path(path_local)
|
||||
hit = session.wait_for_thread_stopped("breakpoint")
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
print("Local Path: " + path_local)
|
||||
print("Frames: " + str(frames))
|
||||
assert frames[0]["source"]["path"] == some.path(path_local)
|
||||
|
||||
remote_code_path = session.read_json()
|
||||
assert path_remote == Path(remote_code_path)
|
||||
assert path_remote == some.path(remote_code_path)
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
def test_with_path_mappings(pyfile, tmpdir, start_method, run_as):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
from debug_me import backchannel
|
||||
import os
|
||||
import sys
|
||||
import backchannel
|
||||
|
||||
json = backchannel.read_json()
|
||||
call_me_back_dir = json['call_me_back_dir']
|
||||
call_me_back_dir = json["call_me_back_dir"]
|
||||
sys.path.append(call_me_back_dir)
|
||||
|
||||
import call_me_back
|
||||
|
||||
def call_func():
|
||||
print('break here')
|
||||
print("break here") # @bp
|
||||
|
||||
backchannel.write_json(os.path.abspath(__file__))
|
||||
call_me_back.call_me_back(call_func)
|
||||
print('done')
|
||||
print("done")
|
||||
|
||||
bp_line = 13
|
||||
path_local = tmpdir.mkdir('local').join('code_to_debug.py').strpath
|
||||
path_remote = tmpdir.mkdir('remote').join('code_to_debug.py').strpath
|
||||
path_local = tmpdir.mkdir("local").join("code_to_debug.py").strpath
|
||||
path_remote = tmpdir.mkdir("remote").join("code_to_debug.py").strpath
|
||||
|
||||
dir_local = os.path.dirname(path_local)
|
||||
dir_remote = os.path.dirname(path_remote)
|
||||
|
|
@@ -136,46 +134,45 @@ def test_with_path_mappings(pyfile, tmpdir, start_method, run_as):
|
|||
shutil.copyfile(code_to_debug, path_local)
|
||||
shutil.copyfile(code_to_debug, path_remote)
|
||||
|
||||
call_me_back_dir = get_test_root('call_me_back')
|
||||
call_me_back_dir = test_data / "call_me_back"
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, path_remote),
|
||||
start_method=start_method,
|
||||
use_backchannel=True,
|
||||
path_mappings=[{
|
||||
'localRoot': dir_local,
|
||||
'remoteRoot': dir_remote,
|
||||
}],
|
||||
path_mappings=[{"localRoot": dir_local, "remoteRoot": dir_remote}],
|
||||
)
|
||||
session.set_breakpoints(path_remote, [bp_line])
|
||||
session.set_breakpoints(path_remote, [code_to_debug.lines["bp"]])
|
||||
session.start_debugging()
|
||||
session.write_json({'call_me_back_dir': call_me_back_dir})
|
||||
hit = session.wait_for_thread_stopped('breakpoint')
|
||||
session.write_json({"call_me_back_dir": call_me_back_dir})
|
||||
hit = session.wait_for_thread_stopped("breakpoint")
|
||||
|
||||
frames = hit.stacktrace.body['stackFrames']
|
||||
assert frames[0]['source']['path'] == Path(path_local)
|
||||
source_reference = frames[0]['source']['sourceReference']
|
||||
frames = hit.stacktrace.body["stackFrames"]
|
||||
assert frames[0]["source"]["path"] == some.path(path_local)
|
||||
source_reference = frames[0]["source"]["sourceReference"]
|
||||
assert source_reference == 0 # Mapped files should be found locally.
|
||||
|
||||
assert frames[1]['source']['path'].endswith('call_me_back.py')
|
||||
source_reference = frames[1]['source']['sourceReference']
|
||||
assert frames[1]["source"]["path"].endswith("call_me_back.py")
|
||||
source_reference = frames[1]["source"]["sourceReference"]
|
||||
assert source_reference > 0 # Unmapped file should have a source reference.
|
||||
|
||||
resp_source = session.send_request('source', arguments={
|
||||
'sourceReference': 0
|
||||
}).wait_for_response(raise_if_failed=False)
|
||||
resp_source = session.send_request(
|
||||
"source", arguments={"sourceReference": 0}
|
||||
).wait_for_response(raise_if_failed=False)
|
||||
assert not resp_source.success
|
||||
text = ''.join(traceback.format_exception_only(type(resp_source.body), resp_source.body))
|
||||
assert 'Source unavailable' in text
|
||||
text = "".join(
|
||||
traceback.format_exception_only(type(resp_source.body), resp_source.body)
|
||||
)
|
||||
assert "Source unavailable" in text
|
||||
|
||||
resp_source = session.send_request('source', arguments={
|
||||
'sourceReference': source_reference
|
||||
}).wait_for_response()
|
||||
assert "def call_me_back(callback):" in (resp_source.body['content'])
|
||||
resp_source = session.send_request(
|
||||
"source", arguments={"sourceReference": source_reference}
|
||||
).wait_for_response()
|
||||
assert "def call_me_back(callback):" in (resp_source.body["content"])
|
||||
|
||||
remote_code_path = session.read_json()
|
||||
assert path_remote == Path(remote_code_path)
|
||||
assert path_remote == some.path(remote_code_path)
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
|
|
|||
|
|
@@ -14,7 +14,7 @@ from tests.patterns import some
|
|||
from tests.timeline import Event
|
||||
|
||||
|
||||
@pytest.mark.parametrize('run_as', ['file', 'module', 'code'])
|
||||
@pytest.mark.parametrize("run_as", ["file", "module", "code"])
|
||||
def test_run(pyfile, start_method, run_as):
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
|
|
@@ -22,10 +22,10 @@ def test_run(pyfile, start_method, run_as):
|
|||
from os import path
|
||||
import sys
|
||||
|
||||
print('begin')
|
||||
assert backchannel.receive() == 'continue'
|
||||
backchannel.send(path.abspath(sys.modules['ptvsd'].__file__))
|
||||
print('end')
|
||||
print("begin")
|
||||
assert backchannel.receive() == "continue"
|
||||
backchannel.send(path.abspath(sys.modules["ptvsd"].__file__))
|
||||
print("end")
|
||||
|
||||
with debug.Session(start_method) as session:
|
||||
backchannel = session.setup_backchannel()
|
||||
|
|
@@ -33,56 +33,56 @@ def test_run(pyfile, start_method, run_as):
|
|||
session.start_debugging()
|
||||
assert session.timeline.is_frozen
|
||||
|
||||
process_event, = session.all_occurrences_of(Event('process'))
|
||||
process_event, = session.all_occurrences_of(Event("process"))
|
||||
expected_name = (
|
||||
'-c' if run_as == 'code'
|
||||
else some.str.matching(re.escape(code_to_debug) + r'(c|o)?$')
|
||||
"-c"
|
||||
if run_as == "code"
|
||||
else some.str.matching(re.escape(code_to_debug) + r"(c|o)?$")
|
||||
)
|
||||
assert process_event == Event(
|
||||
"process", some.dict.containing({"name": expected_name})
|
||||
)
|
||||
assert process_event == Event('process', some.dict.containing({
|
||||
'name': expected_name
|
||||
}))
|
||||
|
||||
backchannel.send('continue')
|
||||
backchannel.send("continue")
|
||||
ptvsd_path = backchannel.receive()
|
||||
expected_ptvsd_path = path.abspath(ptvsd.__file__)
|
||||
assert re.match(re.escape(expected_ptvsd_path) + r'(c|o)?$', ptvsd_path)
|
||||
assert re.match(re.escape(expected_ptvsd_path) + r"(c|o)?$", ptvsd_path)
|
||||
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
def test_run_submodule():
|
||||
cwd = str(test_data / 'testpkgs')
|
||||
with debug.Session('launch') as session:
|
||||
session.initialize(target=('module', 'pkg1.sub'), cwd=cwd)
|
||||
cwd = str(test_data / "testpkgs")
|
||||
with debug.Session("launch") as session:
|
||||
session.initialize(target=("module", "pkg1.sub"), cwd=cwd)
|
||||
session.start_debugging()
|
||||
session.wait_for_next(Event('output', some.dict.containing({
|
||||
'category': 'stdout',
|
||||
'output': 'three'
|
||||
})))
|
||||
session.wait_for_next(
|
||||
Event(
|
||||
"output",
|
||||
some.dict.containing({"category": "stdout", "output": "three"}),
|
||||
)
|
||||
)
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('run_as', ['file', 'module', 'code'])
|
||||
@pytest.mark.parametrize("run_as", ["file", "module", "code"])
|
||||
def test_nodebug(pyfile, run_as):
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from debug_me import backchannel
|
||||
backchannel.receive() #@ bp1
|
||||
print('ok') #@ bp2
|
||||
|
||||
with debug.Session('launch') as session:
|
||||
backchannel.receive() # @ bp1
|
||||
print("ok") # @ bp2
|
||||
|
||||
with debug.Session("launch") as session:
|
||||
session.no_debug = True
|
||||
backchannel = session.setup_backchannel()
|
||||
session.initialize(target=(run_as, code_to_debug))
|
||||
|
||||
breakpoints = session.set_breakpoints(code_to_debug, [
|
||||
code_to_debug.lines["bp1"],
|
||||
code_to_debug.lines["bp2"],
|
||||
])
|
||||
assert breakpoints == [
|
||||
{'verified': False},
|
||||
{'verified': False},
|
||||
]
|
||||
breakpoints = session.set_breakpoints(
|
||||
code_to_debug, [code_to_debug.lines["bp1"], code_to_debug.lines["bp2"]]
|
||||
)
|
||||
assert breakpoints == [{"verified": False}, {"verified": False}]
|
||||
|
||||
session.start_debugging()
|
||||
backchannel.send(None)
|
||||
|
|
@@ -90,19 +90,21 @@ def test_nodebug(pyfile, run_as):
|
|||
# Breakpoint shouldn't be hit.
|
||||
session.wait_for_exit()
|
||||
|
||||
session.expect_realized(Event('output', some.dict.containing({
|
||||
'category': 'stdout',
|
||||
'output': 'ok',
|
||||
})))
|
||||
session.expect_realized(
|
||||
Event(
|
||||
"output", some.dict.containing({"category": "stdout", "output": "ok"})
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('run_as', ['script', 'module'])
|
||||
@pytest.mark.parametrize("run_as", ["script", "module"])
|
||||
def test_run_vs(pyfile, run_as):
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from debug_me import backchannel
|
||||
print('ok')
|
||||
backchannel.send('ok')
|
||||
|
||||
print("ok")
|
||||
backchannel.send("ok")
|
||||
|
||||
@pyfile
|
||||
def ptvsd_launcher():
|
||||
|
|
@@ -110,19 +112,19 @@ def test_run_vs(pyfile, run_as):
|
|||
import ptvsd.debugger
|
||||
|
||||
args = tuple(backchannel.receive())
|
||||
print('debug{0!r}'.format(args))
|
||||
print("debug{0!r}".format(args))
|
||||
ptvsd.debugger.debug(*args)
|
||||
|
||||
filename = 'code_to_debug' if run_as == 'module' else code_to_debug
|
||||
with debug.Session('custom_client') as session:
|
||||
filename = "code_to_debug" if run_as == "module" else code_to_debug
|
||||
with debug.Session("custom_client") as session:
|
||||
backchannel = session.setup_backchannel()
|
||||
|
||||
session.before_connect = lambda: backchannel.send([
|
||||
filename, session.ptvsd_port, None, None, run_as
|
||||
])
|
||||
session.before_connect = lambda: backchannel.send(
|
||||
[filename, session.ptvsd_port, None, None, run_as]
|
||||
)
|
||||
|
||||
session.initialize(target=('file', ptvsd_launcher))
|
||||
session.initialize(target=("file", ptvsd_launcher))
|
||||
session.start_debugging()
|
||||
|
||||
assert backchannel.receive() == 'ok'
|
||||
assert backchannel.receive() == "ok"
|
||||
session.wait_for_exit()
|
||||
|
|
|
|||
|
|
@@ -9,13 +9,11 @@ from tests.patterns import some
|
|||
|
||||
|
||||
def test_set_expression(pyfile, start_method, run_as):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
import backchannel
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
from debug_me import backchannel
|
||||
import ptvsd
|
||||
|
||||
a = 1
|
||||
ptvsd.break_into_debugger()
|
||||
backchannel.write_json(a)
|
||||
|
|
@@ -29,35 +27,38 @@ def test_set_expression(pyfile, start_method, run_as):
|
|||
session.start_debugging()
|
||||
hit = session.wait_for_thread_stopped()
|
||||
|
||||
resp_scopes = session.send_request('scopes', arguments={
|
||||
'frameId': hit.frame_id
|
||||
}).wait_for_response()
|
||||
scopes = resp_scopes.body['scopes']
|
||||
resp_scopes = session.send_request(
|
||||
"scopes", arguments={"frameId": hit.frame_id}
|
||||
).wait_for_response()
|
||||
scopes = resp_scopes.body["scopes"]
|
||||
assert len(scopes) > 0
|
||||
|
||||
resp_variables = session.send_request('variables', arguments={
|
||||
'variablesReference': scopes[0]['variablesReference']
|
||||
}).wait_for_response()
|
||||
variables = list(v for v in resp_variables.body['variables'] if v['name'] == 'a')
|
||||
assert variables == [{
|
||||
'type': 'int',
|
||||
'value': '1',
|
||||
'name': 'a',
|
||||
'evaluateName': "a",
|
||||
'variablesReference': 0,
|
||||
}]
|
||||
resp_variables = session.send_request(
|
||||
"variables",
|
||||
arguments={"variablesReference": scopes[0]["variablesReference"]},
|
||||
).wait_for_response()
|
||||
variables = list(
|
||||
v for v in resp_variables.body["variables"] if v["name"] == "a"
|
||||
)
|
||||
assert variables == [
|
||||
{
|
||||
"type": "int",
|
||||
"value": "1",
|
||||
"name": "a",
|
||||
"evaluateName": "a",
|
||||
"variablesReference": 0,
|
||||
}
|
||||
]
|
||||
|
||||
resp_set_variable = session.send_request('setExpression', arguments={
|
||||
'frameId': hit.frame_id,
|
||||
'expression': 'a',
|
||||
'value': '1000'
|
||||
}).wait_for_response()
|
||||
assert resp_set_variable.body == ANY.dict_with({
|
||||
'type': 'int',
|
||||
'value': '1000'
|
||||
})
|
||||
resp_set_variable = session.send_request(
|
||||
"setExpression",
|
||||
arguments={"frameId": hit.frame_id, "expression": "a", "value": "1000"},
|
||||
).wait_for_response()
|
||||
assert resp_set_variable.body == some.dict.containing(
|
||||
{"type": "int", "value": "1000"}
|
||||
)
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
assert session.read_json() == 1000
|
||||
|
||||
|
|
|
|||
|
|
@@ -12,117 +12,114 @@ from tests import debug
|
|||
from tests.patterns import some
|
||||
|
||||
|
||||
@pytest.mark.parametrize('start_method', ['launch'])
|
||||
@pytest.mark.skipif(sys.version_info < (3, 0) and platform.system() == 'Windows',
|
||||
reason="On Win32 Python2.7, unable to send key strokes to test.")
|
||||
@pytest.mark.parametrize("start_method", ["launch"])
|
||||
@pytest.mark.skipif(
|
||||
sys.version_info < (3, 0) and platform.system() == "Windows",
|
||||
reason="On Win32 Python2.7, unable to send key strokes to test.",
|
||||
)
|
||||
def test_wait_on_normal_exit_enabled(pyfile, start_method, run_as):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
import backchannel
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
from debug_me import backchannel
|
||||
import ptvsd
|
||||
|
||||
ptvsd.break_into_debugger()
|
||||
backchannel.write_json('done')
|
||||
backchannel.write_json("done")
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
debug_options=['WaitOnNormalExit'],
|
||||
debug_options=["WaitOnNormalExit"],
|
||||
use_backchannel=True,
|
||||
)
|
||||
session.start_debugging()
|
||||
|
||||
session.wait_for_thread_stopped()
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
session.expected_returncode = ANY.int
|
||||
assert session.read_json() == 'done'
|
||||
session.expected_returncode = some.int
|
||||
assert session.read_json() == "done"
|
||||
|
||||
session.process.stdin.write(b' \r\n')
|
||||
session.process.stdin.write(b" \r\n")
|
||||
session.wait_for_exit()
|
||||
|
||||
decoded = u'\n'.join(
|
||||
(x.decode('utf-8') if isinstance(x, bytes) else x)
|
||||
for x in session.output_data['OUT']
|
||||
decoded = "\n".join(
|
||||
(x.decode("utf-8") if isinstance(x, bytes) else x)
|
||||
for x in session.output_data["OUT"]
|
||||
)
|
||||
|
||||
assert u'Press' in decoded
|
||||
assert "Press" in decoded
|
||||
|
||||
|
||||
@pytest.mark.parametrize('start_method', ['launch'])
|
||||
@pytest.mark.skipif(sys.version_info < (3, 0) and platform.system() == 'Windows',
|
||||
reason="On windows py2.7 unable to send key strokes to test.")
|
||||
@pytest.mark.parametrize("start_method", ["launch"])
|
||||
@pytest.mark.skipif(
|
||||
sys.version_info < (3, 0) and platform.system() == "Windows",
|
||||
reason="On windows py2.7 unable to send key strokes to test.",
|
||||
)
|
||||
def test_wait_on_abnormal_exit_enabled(pyfile, start_method, run_as):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
import backchannel
|
||||
from debug_me import backchannel
|
||||
import sys
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import ptvsd
|
||||
|
||||
ptvsd.break_into_debugger()
|
||||
backchannel.write_json('done')
|
||||
backchannel.write_json("done")
|
||||
sys.exit(12345)
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
debug_options=['WaitOnAbnormalExit'],
|
||||
debug_options=["WaitOnAbnormalExit"],
|
||||
use_backchannel=True,
|
||||
)
|
||||
session.start_debugging()
|
||||
|
||||
session.wait_for_thread_stopped()
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
session.expected_returncode = ANY.int
|
||||
assert session.read_json() == 'done'
|
||||
session.expected_returncode = some.int
|
||||
assert session.read_json() == "done"
|
||||
|
||||
session.process.stdin.write(b' \r\n')
|
||||
session.process.stdin.write(b" \r\n")
|
||||
session.wait_for_exit()
|
||||
|
||||
def _decode(text):
|
||||
if isinstance(text, bytes):
|
||||
return text.decode('utf-8')
|
||||
return text.decode("utf-8")
|
||||
return text
|
||||
|
||||
assert any(
|
||||
l for l in session.output_data['OUT']
|
||||
if _decode(l).startswith('Press')
|
||||
l for l in session.output_data["OUT"] if _decode(l).startswith("Press")
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('start_method', ['launch'])
|
||||
@pytest.mark.parametrize("start_method", ["launch"])
|
||||
def test_exit_normally_with_wait_on_abnormal_exit_enabled(pyfile, start_method, run_as):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
import backchannel
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
from debug_me import backchannel
|
||||
import ptvsd
|
||||
|
||||
ptvsd.break_into_debugger()
|
||||
backchannel.write_json('done')
|
||||
backchannel.write_json("done")
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
debug_options=['WaitOnAbnormalExit'],
|
||||
debug_options=["WaitOnAbnormalExit"],
|
||||
use_backchannel=True,
|
||||
)
|
||||
session.start_debugging()
|
||||
|
||||
session.wait_for_thread_stopped()
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
|
||||
session.wait_for_termination()
|
||||
|
||||
assert session.read_json() == 'done'
|
||||
assert session.read_json() == "done"
|
||||
|
||||
session.wait_for_exit()
|
||||
|
|
|
|||
|
|
@@ -12,11 +12,9 @@ from tests.timeline import Event
|
|||
|
||||
|
||||
def test_set_next_statement(pyfile, start_method, run_as):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
|
||||
def func():
|
||||
print(1) # @inner1
|
||||
|
|
@@ -25,65 +23,67 @@ def test_set_next_statement(pyfile, start_method, run_as):
|
|||
print(3) # @outer3
|
||||
func()
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
line_numbers = code_to_debug.lines
|
||||
print(line_numbers)
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
ignore_unobserved=[Event('continued')],
|
||||
env={'PTVSD_USE_CONTINUED': '1'},
|
||||
ignore_unobserved=[Event("continued")],
|
||||
env={"PTVSD_USE_CONTINUED": "1"},
|
||||
)
|
||||
session.set_breakpoints(code_to_debug, [line_numbers['inner1']])
|
||||
session.set_breakpoints(code_to_debug, [line_numbers["inner1"]])
|
||||
session.start_debugging()
|
||||
|
||||
stop = session.wait_for_thread_stopped()
|
||||
frames = stop.stacktrace.body['stackFrames']
|
||||
line = frames[0]['line']
|
||||
assert line == line_numbers['inner1']
|
||||
frames = stop.stacktrace.body["stackFrames"]
|
||||
line = frames[0]["line"]
|
||||
assert line == line_numbers["inner1"]
|
||||
|
||||
targets = session.send_request('gotoTargets', {
|
||||
'source': {'path': code_to_debug},
|
||||
'line': line_numbers['outer3'],
|
||||
}).wait_for_response().body['targets']
|
||||
targets = (
|
||||
session.send_request(
|
||||
"gotoTargets",
|
||||
{"source": {"path": code_to_debug}, "line": line_numbers["outer3"]},
|
||||
)
|
||||
.wait_for_response()
|
||||
.body["targets"]
|
||||
)
|
||||
|
||||
assert targets == [{
|
||||
'id': ANY.num,
|
||||
'label': ANY.str,
|
||||
'line': line_numbers['outer3']
|
||||
}]
|
||||
outer3_target = targets[0]['id']
|
||||
assert targets == [
|
||||
{"id": some.number, "label": some.str, "line": line_numbers["outer3"]}
|
||||
]
|
||||
outer3_target = targets[0]["id"]
|
||||
|
||||
with pytest.raises(Exception):
|
||||
session.send_request('goto', {
|
||||
'threadId': stop.thread_id,
|
||||
'targetId': outer3_target,
|
||||
}).wait_for_response()
|
||||
session.send_request(
|
||||
"goto", {"threadId": stop.thread_id, "targetId": outer3_target}
|
||||
).wait_for_response()
|
||||
|
||||
targets = session.send_request('gotoTargets', {
|
||||
'source': {'path': code_to_debug},
|
||||
'line': line_numbers['inner2'],
|
||||
}).wait_for_response().body['targets']
|
||||
targets = (
|
||||
session.send_request(
|
||||
"gotoTargets",
|
||||
{"source": {"path": code_to_debug}, "line": line_numbers["inner2"]},
|
||||
)
|
||||
.wait_for_response()
|
||||
.body["targets"]
|
||||
)
|
||||
|
||||
assert targets == [{
|
||||
'id': ANY.num,
|
||||
'label': ANY.str,
|
||||
'line': line_numbers['inner2'],
|
||||
}]
|
||||
inner2_target = targets[0]['id']
|
||||
assert targets == [
|
||||
{"id": some.number, "label": some.str, "line": line_numbers["inner2"]}
|
||||
]
|
||||
inner2_target = targets[0]["id"]
|
||||
|
||||
session.send_request('goto', {
|
||||
'threadId': stop.thread_id,
|
||||
'targetId': inner2_target,
|
||||
}).wait_for_response()
|
||||
session.send_request(
|
||||
"goto", {"threadId": stop.thread_id, "targetId": inner2_target}
|
||||
).wait_for_response()
|
||||
|
||||
session.wait_for_next(Event('continued'))
|
||||
session.wait_for_next(Event("continued"))
|
||||
|
||||
stop = session.wait_for_thread_stopped(reason='goto')
|
||||
frames = stop.stacktrace.body['stackFrames']
|
||||
line = frames[0]['line']
|
||||
assert line == line_numbers['inner2']
|
||||
stop = session.wait_for_thread_stopped(reason="goto")
|
||||
frames = stop.stacktrace.body["stackFrames"]
|
||||
line = frames[0]["line"]
|
||||
assert line == line_numbers["inner2"]
|
||||
|
||||
session.send_request('continue').wait_for_response()
|
||||
session.send_request("continue").wait_for_response()
|
||||
session.wait_for_exit()
|
||||
|
|
|
|||
|
|
@@ -10,50 +10,53 @@ from tests import debug
|
|||
from tests.patterns import some
|
||||
|
||||
|
||||
@pytest.mark.parametrize('start_method', ['launch'])
|
||||
@pytest.mark.parametrize('with_bp', ['with_breakpoint', ''])
|
||||
@pytest.mark.parametrize("start_method", ["launch"])
|
||||
@pytest.mark.parametrize("with_bp", ["with_breakpoint", ""])
|
||||
def test_stop_on_entry(pyfile, start_method, run_as, with_bp):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
import backchannel # @bp
|
||||
# import_and_enable_debugger()
|
||||
backchannel.write_json('done')
|
||||
from debug_me import backchannel # @bp
|
||||
|
||||
backchannel.write_json("done")
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
debug_options=['StopOnEntry'],
|
||||
debug_options=["StopOnEntry"],
|
||||
use_backchannel=True,
|
||||
)
|
||||
if bool(with_bp):
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
bp_line = line_numbers['bp']
|
||||
session.set_breakpoints(code_to_debug, [bp_line])
|
||||
session.set_breakpoints(code_to_debug, [code_to_debug.lines["bp"]])
|
||||
|
||||
session.start_debugging()
|
||||
|
||||
if bool(with_bp):
|
||||
thread_stopped, resp_stacktrace, thread_id, _ = session.wait_for_thread_stopped(reason='breakpoint')
|
||||
frames = resp_stacktrace.body['stackFrames']
|
||||
assert frames[0]['line'] == 1
|
||||
assert frames[0]['source']['path'] == Path(code_to_debug)
|
||||
thread_stopped, resp_stacktrace, thread_id, _ = session.wait_for_thread_stopped(
|
||||
reason="breakpoint"
|
||||
)
|
||||
frames = resp_stacktrace.body["stackFrames"]
|
||||
assert frames[0]["line"] == 1
|
||||
assert frames[0]["source"]["path"] == some.path(code_to_debug)
|
||||
|
||||
session.send_request('next', {'threadId': thread_id}).wait_for_response()
|
||||
thread_stopped, resp_stacktrace, thread_id, _ = session.wait_for_thread_stopped(reason='step')
|
||||
frames = resp_stacktrace.body['stackFrames']
|
||||
assert frames[0]['line'] == 3
|
||||
assert frames[0]['source']['path'] == Path(code_to_debug)
|
||||
session.send_request("next", {"threadId": thread_id}).wait_for_response()
|
||||
thread_stopped, resp_stacktrace, thread_id, _ = session.wait_for_thread_stopped(
|
||||
reason="step"
|
||||
)
|
||||
frames = resp_stacktrace.body["stackFrames"]
|
||||
assert frames[0]["line"] == 3
|
||||
assert frames[0]["source"]["path"] == some.path(code_to_debug)
|
||||
else:
|
||||
thread_stopped, resp_stacktrace, tid, _ = session.wait_for_thread_stopped(reason='entry')
|
||||
frames = resp_stacktrace.body['stackFrames']
|
||||
assert frames[0]['line'] == 1
|
||||
assert frames[0]['source']['path'] == Path(code_to_debug)
|
||||
thread_stopped, resp_stacktrace, tid, _ = session.wait_for_thread_stopped(
|
||||
reason="entry"
|
||||
)
|
||||
frames = resp_stacktrace.body["stackFrames"]
|
||||
assert frames[0]["line"] == 1
|
||||
assert frames[0]["source"]["path"] == some.path(code_to_debug)
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_termination()
|
||||
|
||||
assert session.read_json() == 'done'
|
||||
assert session.read_json() == "done"
|
||||
|
||||
session.wait_for_exit()
|
||||
|
|
|
|||
|
|
@@ -10,16 +10,15 @@ import pytest
|
|||
from tests import debug
|
||||
|
||||
|
||||
@pytest.mark.parametrize('count', [1, 3])
|
||||
@pytest.mark.parametrize("count", [1, 3])
|
||||
def test_thread_count(pyfile, start_method, run_as, count):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
import threading
|
||||
import time
|
||||
import sys
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
|
||||
stop = False
|
||||
|
||||
def worker(tid, offset):
|
||||
|
|
@@ -30,42 +29,39 @@ def test_thread_count(pyfile, start_method, run_as, count):
|
|||
i += 1
|
||||
|
||||
threads = []
|
||||
if sys.argv[1] != '1':
|
||||
if sys.argv[1] != "1":
|
||||
for i in [111, 222]:
|
||||
thread = threading.Thread(target=worker, args=(i, len(threads)))
|
||||
threads.append(thread)
|
||||
thread.start()
|
||||
print('check here') # @bp
|
||||
print("check here") # @bp
|
||||
stop = True
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
program_args=[str(count)],
|
||||
)
|
||||
session.set_breakpoints(code_to_debug, [line_numbers['bp']])
|
||||
session.set_breakpoints(code_to_debug, [code_to_debug.lines["bp"]])
|
||||
session.start_debugging()
|
||||
session.wait_for_thread_stopped()
|
||||
resp_threads = session.send_request('threads').wait_for_response()
|
||||
resp_threads = session.send_request("threads").wait_for_response()
|
||||
|
||||
assert len(resp_threads.body['threads']) == count
|
||||
assert len(resp_threads.body["threads"]) == count
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
platform.system() not in ['Windows', 'Linux', 'Darwin'],
|
||||
reason='Test not implemented on ' + platform.system())
|
||||
platform.system() not in ["Windows", "Linux", "Darwin"],
|
||||
reason="Test not implemented on " + platform.system(),
|
||||
)
|
||||
def test_debug_this_thread(pyfile, start_method, run_as):
|
||||
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
|
||||
import debug_me # noqa
|
||||
import platform
|
||||
import ptvsd
|
||||
import threading
|
||||
|
|
@@ -77,33 +73,43 @@ def test_debug_this_thread(pyfile, start_method, run_as):
|
|||
|
||||
event = threading.Event()
|
||||
|
||||
if platform.system() == 'Windows':
|
||||
if platform.system() == "Windows":
|
||||
from ctypes import CFUNCTYPE, c_void_p, c_size_t, c_uint32, windll
|
||||
|
||||
thread_func_p = CFUNCTYPE(c_uint32, c_void_p)
|
||||
thread_func = thread_func_p(foo) # must hold a reference to wrapper during the call
|
||||
assert windll.kernel32.CreateThread(c_void_p(0), c_size_t(0), thread_func, c_void_p(0), c_uint32(0), c_void_p(0))
|
||||
elif platform.system() == 'Linux' or platform.system() == 'Darwin':
|
||||
thread_func = thread_func_p(
|
||||
foo
|
||||
) # must hold a reference to wrapper during the call
|
||||
assert windll.kernel32.CreateThread(
|
||||
c_void_p(0),
|
||||
c_size_t(0),
|
||||
thread_func,
|
||||
c_void_p(0),
|
||||
c_uint32(0),
|
||||
c_void_p(0),
|
||||
)
|
||||
elif platform.system() == "Linux" or platform.system() == "Darwin":
|
||||
from ctypes import CDLL, CFUNCTYPE, byref, c_void_p, c_ulong
|
||||
from ctypes.util import find_library
|
||||
libpthread = CDLL(find_library('libpthread'))
|
||||
|
||||
libpthread = CDLL(find_library("libpthread"))
|
||||
thread_func_p = CFUNCTYPE(c_void_p, c_void_p)
|
||||
thread_func = thread_func_p(foo) # must hold a reference to wrapper during the call
|
||||
assert not libpthread.pthread_create(byref(c_ulong(0)), c_void_p(0), thread_func, c_void_p(0))
|
||||
thread_func = thread_func_p(
|
||||
foo
|
||||
) # must hold a reference to wrapper during the call
|
||||
assert not libpthread.pthread_create(
|
||||
byref(c_ulong(0)), c_void_p(0), thread_func, c_void_p(0)
|
||||
)
|
||||
else:
|
||||
assert False
|
||||
|
||||
event.wait()
|
||||
|
||||
line_numbers = get_marked_line_numbers(code_to_debug)
|
||||
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
)
|
||||
session.set_breakpoints(code_to_debug, [line_numbers['bp']])
|
||||
session.initialize(target=(run_as, code_to_debug), start_method=start_method)
|
||||
session.set_breakpoints(code_to_debug, [code_to_debug.lines["bp"]])
|
||||
session.start_debugging()
|
||||
|
||||
session.wait_for_thread_stopped()
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
|
|
|||
|
|
@@ -11,87 +11,90 @@ from tests.patterns import some
|
|||
from tests.timeline import Event
|
||||
|
||||
|
||||
@pytest.mark.parametrize('module', [True, False])
|
||||
@pytest.mark.parametrize('line', [True, False])
|
||||
@pytest.mark.parametrize("module", [True, False])
|
||||
@pytest.mark.parametrize("line", [True, False])
|
||||
def test_stack_format(pyfile, start_method, run_as, module, line):
|
||||
@pyfile
|
||||
def code_to_debug():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
from test_module import do_something
|
||||
|
||||
do_something()
|
||||
|
||||
@pyfile
|
||||
def test_module():
|
||||
# import_and_enable_debugger()
|
||||
def do_something():
|
||||
print('break here')
|
||||
print("break here") # @bp
|
||||
|
||||
bp_line = 3
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, code_to_debug),
|
||||
start_method=start_method,
|
||||
ignore_unobserved=[Event('stopped')],
|
||||
ignore_unobserved=[Event("stopped")],
|
||||
)
|
||||
session.set_breakpoints(test_module, [bp_line])
|
||||
session.set_breakpoints(test_module, [code_to_debug.lines["bp"]])
|
||||
session.start_debugging()
|
||||
|
||||
hit = session.wait_for_thread_stopped()
|
||||
resp_stacktrace = session.send_request('stackTrace', arguments={
|
||||
'threadId': hit.thread_id,
|
||||
'format': {'module': module, 'line': line},
|
||||
}).wait_for_response()
|
||||
assert resp_stacktrace.body['totalFrames'] > 0
|
||||
frames = resp_stacktrace.body['stackFrames']
|
||||
resp_stacktrace = session.send_request(
|
||||
"stackTrace",
|
||||
arguments={
|
||||
"threadId": hit.thread_id,
|
||||
"format": {"module": module, "line": line},
|
||||
},
|
||||
).wait_for_response()
|
||||
assert resp_stacktrace.body["totalFrames"] > 0
|
||||
frames = resp_stacktrace.body["stackFrames"]
|
||||
|
||||
assert line == (frames[0]['name'].find(': ' + str(bp_line)) > -1)
|
||||
assert line == (
|
||||
frames[0]["name"].find(": " + str(code_to_debug.lines["bp"])) > -1
|
||||
)
|
||||
|
||||
assert module == (frames[0]['name'].find('test_module') > -1)
|
||||
assert module == (frames[0]["name"].find("test_module") > -1)
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
||||
|
||||
def test_module_events(pyfile, start_method, run_as):
|
||||
@pyfile
|
||||
def module2():
|
||||
# import_and_enable_debugger()
|
||||
def do_more_things():
|
||||
print('done')
|
||||
print("done") # @bp
|
||||
|
||||
@pyfile
|
||||
def module1():
|
||||
# import_and_enable_debugger()
|
||||
import module2
|
||||
|
||||
def do_something():
|
||||
module2.do_more_things()
|
||||
|
||||
@pyfile
|
||||
def test_code():
|
||||
from dbgimporter import import_and_enable_debugger
|
||||
import_and_enable_debugger()
|
||||
import debug_me # noqa
|
||||
from module1 import do_something
|
||||
|
||||
do_something()
|
||||
|
||||
bp_line = 3
|
||||
with debug.Session() as session:
|
||||
session.initialize(
|
||||
target=(run_as, test_code),
|
||||
start_method=start_method,
|
||||
ignore_unobserved=[Event('stopped')],
|
||||
ignore_unobserved=[Event("stopped")],
|
||||
)
|
||||
session.set_breakpoints(module2, [bp_line])
|
||||
session.set_breakpoints(module2, [module2.lines["bp"]])
|
||||
session.start_debugging()
|
||||
|
||||
session.wait_for_thread_stopped()
|
||||
modules = session.all_occurrences_of(Event('module'))
|
||||
modules = [(m.body['module']['name'], m.body['module']['path']) for m in modules]
|
||||
modules = session.all_occurrences_of(Event("module"))
|
||||
modules = [
|
||||
(m.body["module"]["name"], m.body["module"]["path"]) for m in modules
|
||||
]
|
||||
assert modules[:3] == [
|
||||
('module2', Path(module2)),
|
||||
('module1', Path(module1)),
|
||||
('__main__', Path(test_code)),
|
||||
("module2", some.path(module2)),
|
||||
("module1", some.path(module1)),
|
||||
("__main__", some.path(test_code)),
|
||||
]
|
||||
|
||||
session.send_request('continue').wait_for_response(freeze=False)
|
||||
session.send_request("continue").wait_for_response(freeze=False)
|
||||
session.wait_for_exit()
|
||||
|
|
|
|||
|
|
@@ -1,9 +1,8 @@
# -*- coding: utf-8 -*-
import sys
from dbgimporter import import_and_enable_debugger
import_and_enable_debugger()
import debug_me # noqa
def ಏನಾದರೂ_ಮಾಡು():
print('ಏನೋ ಮಾಡಿದೆ'.encode(sys.stdout.encoding, errors='replace'))
print('ಏನೋ ಮಾಡಿದೆ'.encode(sys.stdout.encoding, errors='replace')) # @bp


ಏನಾದರೂ_ಮಾಡು()
@@ -37,7 +37,7 @@ settings.configure(
def home(request):
title = 'hello'
content = 'Django-Django-Test'
template = loader.get_template('hello.html')
template = loader.get_template('hello.html') # @bphome
context = {
'title': title,
'content': content,
@@ -8,6 +8,7 @@ app = Flask(__name__)
@app.route("/")
def home():
content = 'Flask-Jinja-Test'
print('break here') # @bphome
return render_template(
"hello.html",
title='Hello',
@@ -1,3 +1,3 @@
print('one')
print('two')
print('three')
print('one') # @one
print('two') # @two
print('three') # @three
@@ -73,7 +73,7 @@ the return value of `wait_until()` is the first occurrence that realized the exp
A *basic* expectation is described by the circumstances of the occurrence the expectation is to be realized (`expectation.circumstances`). Whereas the circumstances of an occurrence is a data object, the circumstances of the expectation is a *pattern*, as represented by a `Pattern` object from the `pattern` module. An expectation is realized by an occurrence if `occurrence.circumstances in expectation.circumstances` is true (where `in` is an overloaded operator of the `Pattern` object that is used to match values against it; see the docstrings for the `pattern` module for details on patterns). For example, given a basic expectation with these circumstances:
```py
('Event', ANY, ANY.dict_with({'threadId': 1}))
('Event', ANY, some.dict.containing({'threadId': 1}))
```
It can be realized by any of these occurrences:
```py
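
To make the "containing" semantics described above concrete, here is a minimal, self-contained sketch of that matching rule. It is not the actual `pattern`/`some` implementation from this repository; the helper name and plain-function form are illustrative only:
```py
# Illustrative sketch only -- not the tests.patterns implementation.
def dict_containing(expected):
    """Build a matcher that accepts any dict having at least the expected items."""
    def matches(actual):
        return isinstance(actual, dict) and all(
            actual.get(key) == value for key, value in expected.items()
        )
    return matches

matcher = dict_containing({'threadId': 1})
assert matcher({'threadId': 1, 'reason': 'breakpoint'})  # extra keys are allowed
assert not matcher({'threadId': 2})                      # mismatched value
```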
@@ -177,7 +177,7 @@ initialized = debug_session.wait_until(Event('initialized'))
assert (
initialize
>>
Event('output', ANY.dict_with({'category': 'telemetry'}))
Event('output', some.dict.containing({'category': 'telemetry'}))
>>
initialized
) in debug_session.timeline
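
The `>>` chain above asserts the relative order of occurrences in the timeline. As a rough stand-alone illustration of what such a check verifies (not the actual timeline implementation; the list-based helper is hypothetical):
```py
# Hypothetical sketch: a timeline reduced to an ordered list of occurrence labels.
def occurs_before(timeline, first, second):
    """True if `first` was recorded at an earlier position than `second`."""
    return timeline.index(first) < timeline.index(second)

recorded = ['initialize', 'telemetry output', 'initialized']
assert occurs_before(recorded, 'initialize', 'telemetry output')
assert occurs_before(recorded, 'telemetry output', 'initialized')
```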
@@ -307,7 +307,7 @@ debug_session.send_request('setBreakpoints', [
Once all the initial setup is performed, we can start execution:
```py
debug_session.start_debugging()
debug_session.wait_until(Event('stopped', ANY.dict_with({'reason': 'breakpoint'})))
debug_session.wait_until(Event('stopped', some.dict.containing({'reason': 'breakpoint'})))
```
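
Since `wait_until()` returns the first occurrence that realized the expectation, the stop can be examined before resuming. The following is a hedged sketch that assumes the same `debug_session` API used above; the exact requests and fields shown are illustrative, not prescriptive:
```py
# Hedged sketch, assuming the debug_session API shown above.
stopped = debug_session.wait_until(
    Event('stopped', some.dict.containing({'reason': 'breakpoint'}))
)
print(stopped.circumstances)  # the ('Event', 'stopped', {...}) data object for this occurrence

# Resume execution; the debuggee then runs to completion.
debug_session.send_request('continue')
```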