Fix #1695: Handle IDE, launcher, debug server, and no-debug disconnect

Fix #1721: "runInTerminal" is broken on non-Windows platforms.

Fix #1722: Output is not captured in "noDebug" with "runInTerminal"

Groundwork for #1713: adapter: multiple concurrent sessions

Move "launch" request parsing, debuggee process spawning, PID reporting and tracking, stdio "output" capture, and exit code reporting into the launcher. The launcher now communicates with the adapter via a full-fledged message channel.
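
For illustration, a minimal sketch of the kind of DAP events the launcher now sends to the adapter over that channel. The launcher-side code is not part of this excerpt, so the names below (channel, debuggee, text) are assumptions, but the event shapes match the adapter-side handlers added in ptvsd/adapter/launcher.py further down.

# Hypothetical launcher-side sketch; `channel` is assumed to be a
# ptvsd.common.messaging.JsonMessageChannel connected to the adapter,
# `debuggee` a subprocess.Popen for the debuggee, and `text` a captured chunk.
channel.send_event("process", {"systemProcessId": debuggee.pid})      # PID reporting
channel.send_event("output", {"category": "stdout", "output": text})  # stdio capture
channel.send_event("exited", {"exitCode": debuggee.returncode})       # exit code
channel.send_event("terminated", {})                                  # launcher is about to exit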

Refactor the adapter: add an abstraction for a debug session, and treat the IDE, launcher, and debug server as separate components managed by that session.
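
A rough sketch of that component model follows (simplified; the real implementations are in ptvsd/adapter/ide.py and launcher.py below, and in the session/server modules that are not part of this excerpt):

# Simplified sketch only -- not the actual ptvsd.adapter.server module.
from ptvsd.adapter import components

class Server(components.Component):
    """Adapter-side handler for the debug server connection (sketch)."""

    message_handler = components.Component.message_handler

    def __init__(self, session, stream):
        # Component.__init__ wraps the stream in a JsonMessageChannel and starts it.
        super(Server, self).__init__(session, stream)
        session.server = self  # the session tracks IDE, launcher, and server

    @message_handler  # locks the session for the duration of the handler
    def output_event(self, event):
        # Other components are reachable through the shared session.
        self.ide.propagate_after_start(event)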

Improve adapter logging to capture information about the current debug session and the current message handler, if any.

Fix reporting of exceptions from message handlers.

Various test fixes.
Pavel Minaev 2019-08-26 19:01:00 -07:00 committed by Pavel Minaev
parent 30615996ba
commit 340942aafc
47 changed files with 2313 additions and 2682 deletions


@@ -114,8 +114,9 @@ if __name__ == '__main__':
package_dir={'': 'src'},
packages=[
'ptvsd',
'ptvsd.common',
'ptvsd.adapter',
'ptvsd.common',
'ptvsd.launcher',
'ptvsd.server',
'ptvsd._vendored',
],


@@ -6,59 +6,44 @@ from __future__ import absolute_import, print_function, unicode_literals
import argparse
import locale
import os
import sys
# WARNING: ptvsd and submodules must not be imported on top level in this module,
# and should be imported locally inside main() instead.
# Force absolute path on Python 2.
__file__ = os.path.abspath(__file__)
def main(args):
import ptvsd
from ptvsd.common import log, options
from ptvsd.adapter import channels
from ptvsd.common import log, options as common_options
from ptvsd.adapter import session, options as adapter_options
if args.cls and args.debug_server is not None:
print("\033c")
if args.cls:
sys.stderr.write("\033c")
if args.log_stderr:
adapter_options.log_stderr = True
if args.log_dir is not None:
common_options.log_dir = args.log_dir
options.log_dir = args.log_dir
log.stderr_levels |= {"info"}
log.filename_prefix = "ptvsd.adapter"
log.stderr_levels |= {"info"}
log.to_file()
log.describe_environment("ptvsd.adapter startup environment:")
session = session.Session()
if args.debug_server is None:
address = None
session.connect_to_ide()
else:
address = ("localhost", args.debug_server)
# If in debugServer mode, log "debug" to stderr as well.
log.stderr_levels |= {"debug"}
chan = channels.Channels()
ide = chan.connect_to_ide(address)
ide.start()
ide.send_event(
"output",
{
"category": "telemetry",
"output": "ptvsd.adapter",
"data": {"version": ptvsd.__version__},
},
)
# Wait until the IDE debug session is over - everything interesting is going to
# be happening on the background threads running the IDE and the server message
# loops from here on.
ide.wait()
# Make sure the server message loop is also done, but only if the server connection
# has been established.
server = chan.server()
if server is not None:
server.wait()
# If in debugServer mode, log everything to stderr.
log.stderr_levels |= set(log.LEVELS)
with session.accept_connection_from_ide(("localhost", args.debug_server)):
pass
session.wait_for_completion()
def _parse_argv():
def _parse_argv(argv):
parser = argparse.ArgumentParser()
parser.add_argument(
@@ -83,7 +68,14 @@ def _parse_argv():
help="enable logging and use DIR to save adapter logs",
)
return parser.parse_args()
parser.add_argument(
"--log-stderr", action="store_true", help="enable logging to stderr"
)
args = parser.parse_args(argv[1:])
if args.debug_server is None and args.log_stderr:
parser.error("--log-stderr can only be used with --debug-server")
return args
if __name__ == "__main__":
@@ -117,4 +109,4 @@ if __name__ == "__main__":
# Load locale settings.
locale.setlocale(locale.LC_ALL, "")
main(_parse_argv())
main(_parse_argv(sys.argv))

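With the new arguments, the adapter's debug-server mode can presumably be exercised along these lines (the module invocation and port number are assumptions; only --log-stderr and its dependency on --debug-server are visible in this hunk):

python -m ptvsd.adapter --debug-server 8765 --log-stderr

_parse_argv() now rejects --log-stderr unless --debug-server is also given, and in debug-server mode every log level is echoed to stderr.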

@@ -1,154 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
import os
import sys
from ptvsd.common import log, messaging, singleton, sockets
ACCEPT_CONNECTIONS_TIMEOUT = 10
class Channels(singleton.ThreadSafeSingleton):
_ide = None
@singleton.autolocked_method
def ide(self):
"""DAP channel to the IDE over stdin/stdout.
Created by main() as soon as the adapter process starts.
If the IDE has disconnected, this method still returns the closed channel.
"""
return self._ide
_server = None
@singleton.autolocked_method
def server(self):
"""DAP channel to the debug server over a socket.
Created when handling the "attach" or "launch" request.
When the server disconnects, the channel remains, but is closed, and will raise
NoMoreMessages on writes.
"""
return self._server
@singleton.autolocked_method
def connect_to_ide(self, address=None):
"""Creates a DAP message channel to the IDE, and returns that channel.
If address is not None, the channel is established by connecting to the TCP
socket listening on that address. Otherwise, the channel is established over
stdio.
Caller is responsible for calling start() on the returned channel.
"""
assert self._ide is None
# Import message handlers lazily to avoid circular imports.
from ptvsd.adapter import messages
if address is None:
ide_stream = messaging.JsonIOStream.from_stdio("IDE")
# Make sure that nothing else tries to interfere with the stdio streams
# that are going to be used for DAP communication from now on.
sys.stdout = sys.stderr
sys.stdin = open(os.devnull, "r")
else:
host, port = address
listener = sockets.create_server(host, port)
try:
log.info(
"Adapter waiting for connection from IDE on {0}:{1}...", host, port
)
sock, (ide_host, ide_port) = listener.accept()
finally:
listener.close()
log.info("IDE connection accepted from {0}:{1}.", ide_host, ide_port)
ide_stream = messaging.JsonIOStream.from_socket(sock, "IDE")
self._ide = messaging.JsonMessageChannel(
ide_stream, messages.IDEMessages(), ide_stream.name
)
return self._ide
@singleton.autolocked_method
def connect_to_server(self, address):
"""Creates a DAP message channel to the server, and returns that channel.
The channel is established by connecting to the TCP socket listening on the
specified address
Caller is responsible for calling start() on the returned channel.
"""
assert self._server is None
# Import message handlers lazily to avoid circular imports.
from ptvsd.adapter import messages
host, port = address
sock = sockets.create_client()
sock.connect(address)
server_stream = messaging.JsonIOStream.from_socket(sock, "server")
self._server = messaging.JsonMessageChannel(
server_stream, messages.ServerMessages(), server_stream.name
)
return self._server
@singleton.autolocked_method
def accept_connection_from_server(self, address, before_accept=(lambda _: None)):
"""Creates a DAP message channel to the server, and returns that channel.
The channel is established by listening on the specified address until there
is an incoming TCP connection. Only one incoming connection is accepted.
before_accept((host, port)) is invoked after the listener socket has been
set up, but before the thread blocks waiting for incoming connection. This
provides access to the actual port number if port=0.
Caller is responsible for calling start() on the returned channel.
"""
assert self._server is None
# Import message handlers lazily to avoid circular imports.
from ptvsd.adapter import messages
host, port = address
listener = sockets.create_server(host, port, ACCEPT_CONNECTIONS_TIMEOUT)
host, port = listener.getsockname()
log.info(
"Adapter waiting for connection from debug server on {0}:{1}...", host, port
)
before_accept((host, port))
try:
sock, (server_host, server_port) = listener.accept()
finally:
listener.close()
log.info(
"Debug server connection accepted from {0}:{1}.", server_host, server_port
)
server_stream = messaging.JsonIOStream.from_socket(sock, "server")
self._server = server = messaging.JsonMessageChannel(
server_stream, messages.ServerMessages(), server_stream.name
)
return server
@singleton.autolocked_method
def close_server(self):
assert self._server is not None
try:
self._server.close()
except Exception:
log.exception("Error while closing server channel:")
self._server = None


@@ -0,0 +1,166 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
import functools
from ptvsd.common import fmt, json, log, messaging, util
ACCEPT_CONNECTIONS_TIMEOUT = 10
class ComponentNotAvailable(Exception):
def __init__(self, type):
super(ComponentNotAvailable, self).__init__(
fmt("{0} is not available", type.__name__)
)
class Component(util.Observable):
"""A component managed by a debug adapter: IDE, launcher, or debug server.
Every component belongs to a Session, which is used for synchronization and
shared data.
Every component has its own message channel, and provides message handlers for
that channel. All handlers should be decorated with @Component.message_handler,
which ensures that Session is locked for the duration of the handler. Thus, only
one handler is running at any given time across all components, unless the lock
is released explicitly or via Session.wait_for().
Components report changes to their attributes to Session, allowing one component
to wait_for() a change caused by another component.
"""
def __init__(self, session, stream):
super(Component, self).__init__()
self.session = session
stream.name = str(self)
self.channel = messaging.JsonMessageChannel(stream, self)
self.is_connected = True
self.observers += [lambda *_: session.notify_changed()]
self.channel.start()
def __str__(self):
return fmt("{0}-{1}", type(self).__name__, self.session.id)
@property
def ide(self):
return self.session.ide
@property
def launcher(self):
return self.session.launcher
@property
def server(self):
return self.session.server
def wait_for(self, *args, **kwargs):
return self.session.wait_for(*args, **kwargs)
@staticmethod
def message_handler(f):
"""Applied to a message handler to automatically lock and unlock the session
for its duration, and to validate the session state.
If the handler raises ComponentNotAvailable or JsonIOError, converts it to
Message.cant_handle().
"""
@functools.wraps(f)
def lock_and_handle(self, message):
try:
with self.session:
return f(self, message)
except ComponentNotAvailable as exc:
raise message.cant_handle("{0}", exc, silent=True)
except messaging.MessageHandlingError as exc:
exc.propagate(message)
except messaging.JsonIOError as exc:
raise message.cant_handle("{0} disconnected unexpectedly", exc.stream.name, silent=True)
return lock_and_handle
def disconnect(self):
with self.session:
self.is_connected = False
self.session.finalize(fmt("{0} has disconnected", self))
def missing(session, type):
class Missing(object):
"""A dummy component that raises ComponentNotAvailable whenever some
attribute is accessed on it.
"""
__getattr__ = __setattr__ = lambda self, *_: report()
__bool__ = __nonzero__ = lambda self: False
def report():
try:
raise ComponentNotAvailable(type)
except Exception as exc:
raise log.exception("{0} in {1}", exc, session)
return Missing()
class Capabilities(dict):
"""A collection of feature flags for a component. Corresponds to JSON properties
in the DAP "initialize" request or response, other than those that identify the
party.
"""
PROPERTIES = {}
"""JSON property names and default values for the capabilities represented
by instances of this class. Keys are names, and values are either default values
or validators.
If the value is callable, it must be a JSON validator; see ptvsd.common.json for
details. If the value is not callable, it is as if json.default(value) validator
was used instead.
"""
def __init__(self, component, message):
"""Parses an "initialize" request or response and extracts the feature flags.
For every "X" in self.PROPERTIES, sets self["X"] to the corresponding value
from message.payload if it's present there, or to the default value otherwise.
"""
assert message.is_request("initialize") or message.is_response("initialize")
self.component = component
payload = message.payload
for name, validate in self.PROPERTIES.items():
value = payload.get(name, ())
if not callable(validate):
validate = json.default(validate)
try:
value = validate(value)
except Exception as exc:
raise message.isnt_valid("{0!j} {1}", name, exc)
assert value != (), fmt(
"{0!j} must provide a default value for missing properties.", validate
)
self[name] = value
log.debug("{0}", self)
def __repr__(self):
return fmt("{0}: {1!j}", type(self).__name__, dict(self))
def require(self, *keys):
for key in keys:
if not self[key]:
raise messaging.MessageHandlingError(
fmt("{0} does not have capability {1!j}", self.component, key)
)

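As a usage sketch of the Capabilities machinery above (mirroring how ptvsd/adapter/ide.py, further down, defines its PROPERTIES; the handler context in the comments is assumed):

from ptvsd.adapter import components
from ptvsd.common import json

class Expectations(components.Capabilities):
    PROPERTIES = {
        "linesStartAt1": True,            # plain value: treated as json.default(True)
        "pathFormat": json.enum("path"),  # callable: used as the validator directly
    }

# Inside an "initialize" handler (sketch):
#     expectations = Expectations(self, request)   # missing properties get defaults
#     expectations["pathFormat"]                   # -> "path"
#     expectations.require("linesStartAt1")        # MessageHandlingError if falsy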

@@ -1,147 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
"""Runtime contracts for the IDE and the server.
"""
from ptvsd.common import fmt, json, log, singleton
class Capabilities(dict):
"""A collection of feature flags. Corresponds to JSON properties in the DAP
"initialize" request, other than those that identify the party.
"""
PROPERTIES = {}
"""JSON property names and default values for the capabilities represented
by instances of this class. Keys are names, and values are either default values
or validators.
If the value is callable, it must be a JSON validator; see ptvsd.common.json for
details. If the value is not callable, it is as if json.default(value) validator
was used instead.
"""
def __init__(self, message):
"""Parses an "initialize" request or response and extracts the feature flags.
For every "X" in self.PROPERTIES, sets self["X"] to the corresponding value
from message.payload if it's present there, or to the default value otherwise.
"""
payload = message.payload
for name, validate in self.PROPERTIES.items():
value = payload.get(name, ())
if not callable(validate):
validate = json.default(validate)
try:
value = validate(value)
except Exception as exc:
raise message.isnt_valid("{0!j} {1}", name, exc)
assert value != (), fmt(
"{0!j} must provide a default value for missing properties.", validate
)
self[name] = value
log.debug("{0}", self)
def __repr__(self):
return fmt("{0}: {1!j}", type(self).__name__, dict(self))
class IDECapabilities(Capabilities):
PROPERTIES = {
"supportsVariableType": False,
"supportsVariablePaging": False,
"supportsRunInTerminalRequest": False,
"supportsMemoryReferences": False,
}
class ServerCapabilities(Capabilities):
PROPERTIES = {
"supportsConfigurationDoneRequest": False,
"supportsFunctionBreakpoints": False,
"supportsConditionalBreakpoints": False,
"supportsHitConditionalBreakpoints": False,
"supportsEvaluateForHovers": False,
"supportsStepBack": False,
"supportsSetVariable": False,
"supportsRestartFrame": False,
"supportsGotoTargetsRequest": False,
"supportsStepInTargetsRequest": False,
"supportsCompletionsRequest": False,
"supportsModulesRequest": False,
"supportsRestartRequest": False,
"supportsExceptionOptions": False,
"supportsValueFormattingOptions": False,
"supportsExceptionInfoRequest": False,
"supportTerminateDebuggee": False,
"supportsDelayedStackTraceLoading": False,
"supportsLoadedSourcesRequest": False,
"supportsLogPoints": False,
"supportsTerminateThreadsRequest": False,
"supportsSetExpression": False,
"supportsTerminateRequest": False,
"supportsDataBreakpoints": False,
"supportsReadMemoryRequest": False,
"supportsDisassembleRequest": False,
"exceptionBreakpointFilters": [],
"additionalModuleColumns": [],
"supportedChecksumAlgorithms": [],
}
class IDEExpectations(Capabilities):
PROPERTIES = {
"locale": "en-US",
"linesStartAt1": True,
"columnsStartAt1": True,
"pathFormat": json.enum("path"), # we don't support "uri"
}
# Contracts don't have to be thread-safe. The reason is that both contracts are parsed
# while handling IDE messages, so the IDE message loop doesn't need to synchronize;
# and on the other hand, the server message loop is not started until the contracts
# are parsed, and thus cannot observe any changes.
class IDEContract(singleton.Singleton):
"""The contract for the IDE side. Identifies the IDE client, and describes its
capabilities, and expectations from the adapter.
"""
clientID = None
capabilities = None
expectations = None
def parse(self, message):
assert self.capabilities is None and self.expectations is None
assert message.is_request("initialize")
self.client_id = message.arguments.get("clientID", "")
self.capabilities = IDECapabilities(message)
self.expectations = IDEExpectations(message)
class ServerContract(singleton.Singleton):
"""The contract for the server side. Describes its capabilities.
"""
capabilities = None
def parse(self, message):
assert self.capabilities is None
assert message.is_response("initialize")
self.capabilities = ServerCapabilities(message)
ide = IDEContract()
server = ServerContract()


@@ -1,712 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
"""Manages the lifetime of the debugged process and its subprocesses, in scenarios
where it is controlled by the adapter (i.e. "launch").
"""
import atexit
import codecs
import collections
import functools
import locale
import os
import platform
import signal
import socket
import subprocess
import sys
import threading
import ptvsd.__main__
from ptvsd.adapter import channels, contract
from ptvsd.common import compat, fmt, json, launcher, messaging, log, singleton
from ptvsd.common.compat import unicode
terminate_at_exit = True
"""Whether the debuggee process should be terminated when the adapter process exits,
or allowed to continue running.
"""
exit_code = None
"""The exit code of the debuggee process, once it has terminated."""
pid = None
"""Debuggee process ID."""
process_name = None
"""Debuggee process name."""
_captured_output = {}
"""Keys are output categories, values are CaptureOutput instances."""
_got_pid = threading.Event()
"""A threading.Event that is set when pid is set.
"""
_exited = None
"""A threading.Event that is set when the debuggee process exits.
Created when the process is spawned.
"""
SpawnInfo = collections.namedtuple(
"SpawnInfo",
["console", "console_title", "cmdline", "cwd", "env", "redirect_output"],
)
def spawn_and_connect(request):
"""Spawns the process as requested by the DAP "launch" request, with the debug
server running inside the process; and connects to that server. Returns the
server channel.
Caller is responsible for calling start() on the returned channel.
"""
if request("noDebug", json.default(False)):
_parse_request_and_spawn(request, None)
else:
channels.Channels().accept_connection_from_server(
("127.0.0.1", 0),
before_accept=lambda address: _parse_request_and_spawn(request, address),
)
def attach_by_pid(request):
"""Start server to receive connection from the debug server injected into the
debuggee process.
"""
def _parse_request_and_inject(request, address):
host, port = address
ptvsd_args = request("ptvsdArgs", json.array(unicode))
cmdline = [
sys.executable,
compat.filename(ptvsd.__main__.__file__),
"--client",
"--host",
host,
"--port",
str(port),
] + ptvsd_args + [
"--pid",
str(request("processId", int))
]
log.debug("Launching debugger injector: {0!r}", cmdline)
try:
# This process will immediately exit after injecting debug server
subprocess.Popen(
cmdline,
bufsize=0,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
except Exception as exc:
raise request.cant_handle("Error launching debug process: {0}", exc)
channels.Channels().accept_connection_from_server(
("127.0.0.1", 0),
before_accept=lambda address: _parse_request_and_inject(request, address),
)
def _parse_request_and_spawn(request, address):
spawn_info = _parse_request(request, address)
log.debug(
"SpawnInfo = {0!j}",
collections.OrderedDict(
{
"console": spawn_info.console,
"cwd": spawn_info.cwd,
"cmdline": spawn_info.cmdline,
"env": spawn_info.env,
}
),
)
spawn = {
"internalConsole": _spawn_popen,
"integratedTerminal": _spawn_terminal,
"externalTerminal": _spawn_terminal,
}[spawn_info.console]
global _exited
_exited = threading.Event()
try:
spawn(request, spawn_info)
finally:
if pid is None:
_exited.set()
else:
atexit.register(lambda: terminate() if terminate_at_exit else None)
def _parse_request(request, address):
"""Parses a "launch" request and returns SpawnInfo for it.
address is (host, port) on which the adapter listener is waiting for connection
from the debug server.
"""
assert request.is_request("launch")
debug_options = set(request("debugOptions", json.array(unicode)))
# Handling of properties that can also be specified as legacy "debugOptions" flags.
# If property is explicitly set to false, but the flag is in "debugOptions", treat
# it as an error.
def property_or_debug_option(prop_name, flag_name):
assert prop_name[0].islower() and flag_name[0].isupper()
value = request(prop_name, json.default(flag_name in debug_options))
if value is False and flag_name in debug_options:
raise request.isnt_valid(
'{0!r}:false and "debugOptions":[{1!r}] are mutually exclusive',
prop_name,
flag_name,
)
return value
console = request(
"console",
json.enum(
"internalConsole", "integratedTerminal", "externalTerminal", optional=True
),
)
if console != "internalConsole":
if not contract.ide.capabilities["supportsRunInTerminalRequest"]:
raise request.cant_handle(
'Unable to launch via "console":{0!j}, because the IDE does not '
'have the "supportsRunInTerminalRequest" capability',
console,
)
console_title = request("consoleTitle", json.default("Python Debug Console"))
cmdline = []
if property_or_debug_option("sudo", "Sudo"):
if platform.system() == "Windows":
raise request.cant_handle('"sudo":true is not supported on Windows.')
else:
cmdline += ["sudo"]
# "pythonPath" is a deprecated legacy spelling. If "python" is missing, then try
# the alternative. But if both are missing, the error message should say "python".
python_key = "python"
if python_key in request:
if "pythonPath" in request:
raise request.isnt_valid(
'"pythonPath" is not valid if "python" is specified'
)
elif "pythonPath" in request:
python_key = "pythonPath"
python = request(python_key, json.array(unicode, vectorize=True, size=(1,)))
if not len(python):
python = [compat.filename(sys.executable)]
cmdline += python
cmdline += [compat.filename(launcher.__file__)]
if property_or_debug_option("waitOnNormalExit", "WaitOnNormalExit"):
cmdline += ["--wait-on-normal"]
if property_or_debug_option("waitOnAbnormalExit", "WaitOnAbnormalExit"):
cmdline += ["--wait-on-abnormal"]
pid_server_port = start_process_pid_server()
cmdline += ["--internal-port", str(pid_server_port)]
if request("noDebug", json.default(False)):
cmdline += ["--"]
else:
host, port = address
ptvsd_args = request("ptvsdArgs", json.array(unicode))
cmdline += [
"--",
compat.filename(ptvsd.__main__.__file__),
"--client",
"--host",
host,
"--port",
str(port),
] + ptvsd_args
global process_name
program = module = code = ()
if "program" in request:
program = request("program", json.array(unicode, vectorize=True, size=(1,)))
cmdline += program
process_name = program[0]
if "module" in request:
module = request("module", json.array(unicode, vectorize=True, size=(1,)))
cmdline += ["-m"]
cmdline += module
process_name = module[0]
if "code" in request:
code = request("code", json.array(unicode, vectorize=True, size=(1,)))
cmdline += ["-c"]
cmdline += code
process_name = python[0]
num_targets = len([x for x in (program, module, code) if x != ()])
if num_targets == 0:
raise request.isnt_valid(
'either "program", "module", or "code" must be specified'
)
elif num_targets != 1:
raise request.isnt_valid(
'"program", "module", and "code" are mutually exclusive'
)
cmdline += request("args", json.array(unicode))
cwd = request("cwd", unicode, optional=True)
if cwd == ():
# If it's not specified, but we're launching a file rather than a module,
# and the specified path has a directory in it, use that.
cwd = None if program == () else (os.path.dirname(program) or None)
env = request("env", json.object(unicode))
redirect_output = "RedirectOutput" in debug_options
if redirect_output:
# sys.stdout buffering must be disabled - otherwise we won't see the output
# at all until the buffer fills up.
env["PYTHONUNBUFFERED"] = "1"
return SpawnInfo(console, console_title, cmdline, cwd, env, redirect_output)
def _spawn_popen(request, spawn_info):
env = os.environ.copy()
env.update(spawn_info.env)
pid_server_port = start_process_pid_server()
env["PTVSD_PID_SERVER_PORT"] = str(pid_server_port)
cmdline = spawn_info.cmdline
if sys.version_info < (3,):
# Popen() expects command line and environment to be bytes, not Unicode.
# Assume that values are filenames - it's usually either that, or numbers -
# but don't allow encoding to fail if we guessed wrong.
encode = functools.partial(compat.filename_bytes, errors="replace")
cmdline = [encode(s) for s in cmdline]
env = {encode(k): encode(v) for k, v in env.items()}
close_fds = set()
try:
if spawn_info.redirect_output:
# subprocess.PIPE behavior can vary substantially depending on Python version
# and platform; using our own pipes keeps it simple, predictable, and fast.
stdout_r, stdout_w = os.pipe()
stderr_r, stderr_w = os.pipe()
close_fds |= {stdout_r, stdout_w, stderr_r, stderr_w}
else:
# Let it write directly to stdio. If stdout is being used for the IDE DAP
# channel, sys.stdout is already pointing to stderr.
stdout_w = sys.stdout.fileno()
stderr_w = sys.stderr.fileno()
try:
proc = subprocess.Popen(
spawn_info.cmdline,
cwd=spawn_info.cwd,
env=env,
bufsize=0,
stdin=sys.stdin,
stdout=stdout_w,
stderr=stderr_w,
)
except Exception as exc:
raise request.cant_handle(
"Error launching process: {0}\n\nCommand line:{1!r}",
exc,
spawn_info.cmdline,
)
log.info("Spawned launcher process with PID={0}.", proc.pid)
try:
wait_for_pid()
ProcessTracker().track(pid)
except Exception:
# If we can't track it, we won't be able to terminate it if asked; but aside
# from that, it does not prevent debugging.
log.exception(
"Unable to track debuggee process with PID={0}.",
pid,
category="warning",
)
if spawn_info.redirect_output:
global output_redirected
output_redirected = spawn_info.redirect_output
encoding = env.get("PYTHONIOENCODING", locale.getpreferredencoding())
for category, fd, tee in [
("stdout", stdout_r, sys.stdout),
("stderr", stderr_r, sys.stderr),
]:
CaptureOutput(category, fd, tee.fileno(), encoding)
close_fds.remove(fd)
# Wait directly on the Popen object, instead of going via ProcessTracker. This is
# more reliable on Windows, because Popen always has the correct process handle
# that it gets from CreateProcess, whereas ProcessTracker will use OpenProcess to
# get it from PID, and there's a race condition there if the process dies and its
# PID is reused before OpenProcess is called.
def wait_for_exit():
try:
code = proc.wait()
except Exception:
log.exception("Couldn't determine process exit code:")
code = -1
finally:
_report_exit(code)
wait_thread = threading.Thread(target=wait_for_exit, name='"launch" worker')
wait_thread.start()
finally:
for fd in close_fds:
try:
os.close(fd)
except Exception:
log.exception()
def _spawn_terminal(request, spawn_info):
kinds = {"integratedTerminal": "integrated", "externalTerminal": "external"}
body = {
"kind": kinds[spawn_info.console],
"title": spawn_info.console_title,
"cwd": spawn_info.cwd,
"args": spawn_info.cmdline,
"env": spawn_info.env,
}
try:
channels.Channels().ide().request("runInTerminal", body)
except messaging.MessageHandlingError as exc:
exc.propagate(request)
try:
wait_for_pid()
ProcessTracker().track(pid, after_exit=_report_exit)
except Exception as exc:
# If we can't track it, we won't be able to terminate it if asked; but aside
# from that, it does not prevent debugging.
log.exception(
"Unable to track debuggee process with PID={0}: {1}.",
pid,
str(exc),
category="warning",
)
def _report_exit(code):
global exit_code
exit_code = code
ide = channels.Channels().ide()
if ide is not None:
try:
wait_for_remaining_output()
ide.send_event("exited", {"exitCode": -1 if code is None else code})
ide.send_event("terminated")
except Exception:
pass # channel to IDE is already closed
_exited.set()
def wait_for_pid(timeout=None):
"""Waits for debuggee PID to be determined.
Returns True if PID was determined, False if the wait timed out. If it returned
True, then pid is guaranteed to be set.
"""
return _got_pid.wait(timeout)
def wait_for_exit(timeout=None):
"""Waits for the debuggee process to exit.
Returns True if the process exited, False if the wait timed out. If it returned
True, then exit_code is guaranteed to be set.
"""
if pid is None:
# Debuggee was launched with "runInTerminal", but the debug session fell apart
# before we got a "process" event and found out what its PID is. It's not a
# fatal error, but there's nothing to wait on. Debuggee process should have
# exited (or crashed) by now in any case.
return
assert _exited is not None
timed_out = not _exited.wait(timeout)
if not timed_out:
# ProcessTracker will stop tracking it by itself, but it might take a bit
# longer for it to notice that the process is gone. If killall() is invoked
# before that, it will try to kill that non-existing process, and log the
# resulting error. This prevents that.
ProcessTracker().stop_tracking(pid)
return not timed_out
def terminate(after=0):
"""Waits for the debuggee process to exit for the specified number of seconds. If
the process or any subprocesses are still alive after that time, force-kills them.
If any errors occur while trying to kill any process, logs and swallows them.
If the debuggee process hasn't been spawned yet, does nothing.
"""
if _exited is None:
return
wait_for_exit(after)
ProcessTracker().killall()
def register_subprocess(pid):
"""Registers a subprocess of the debuggee process."""
ProcessTracker().track(pid)
class ProcessTracker(singleton.ThreadSafeSingleton):
"""Tracks processes that belong to the debuggee.
"""
_processes = {}
"""Keys are PIDs, and values are handles as used by os.waitpid(). On Windows,
handles are distinct. On all other platforms, the PID is also the handle.
"""
_exit_codes = {}
"""Keys are PIDs, values are exit codes."""
@singleton.autolocked_method
def track(self, pid, after_exit=lambda _: None):
"""Starts tracking the process with the specified PID, and returns its handle.
If the process exits while it is still being tracked, after_exit is invoked
with its exit code.
"""
# Register the atexit handler only once, on the first tracked process.
if not len(self._processes):
atexit.register(lambda: self.killall() if terminate_at_exit else None)
self._processes[pid] = handle = _pid_to_handle(pid)
log.debug(
"Tracking debuggee process with PID={0} and HANDLE=0x{1:08X}.", pid, handle
)
def wait_for_exit():
try:
_, exit_code = os.waitpid(handle, 0)
except Exception:
exit_code = -1
log.exception(
"os.waitpid() for debuggee process with HANDLE=0x{0:08X} failed:",
handle,
)
else:
exit_code >>= 8
log.info(
"Debuggee process with PID={0} exited with exitcode {1}.",
pid,
exit_code,
)
with self:
if pid in self._processes:
self._exit_codes[pid] = exit_code
self.stop_tracking(pid)
after_exit(exit_code)
wait_thread = threading.Thread(
target=wait_for_exit, name=fmt("Process(pid={0}) tracker", pid)
)
wait_thread.daemon = True
wait_thread.start()
return handle
@singleton.autolocked_method
def stop_tracking(self, pid):
if self._processes.pop(pid, None) is not None:
log.debug("Stopped tracking debuggee process with PID={0}.", pid)
@singleton.autolocked_method
def killall(self):
pids = list(self._processes.keys())
for pid in pids:
log.info("Killing debuggee process with PID={0}.", pid)
try:
os.kill(pid, signal.SIGTERM)
except Exception:
log.exception("Couldn't kill debuggee process with PID={0}:", pid)
if platform.system() != "Windows":
_pid_to_handle = lambda pid: pid
else:
import ctypes
from ctypes import wintypes
class ProcessAccess(wintypes.DWORD):
PROCESS_QUERY_LIMITED_INFORMATION = 0x1000
SYNCHRONIZE = 0x100000
OpenProcess = ctypes.windll.kernel32.OpenProcess
OpenProcess.restype = wintypes.HANDLE
OpenProcess.argtypes = (ProcessAccess, wintypes.BOOL, wintypes.DWORD)
def _pid_to_handle(pid):
handle = OpenProcess(
ProcessAccess.PROCESS_QUERY_LIMITED_INFORMATION | ProcessAccess.SYNCHRONIZE,
False,
pid,
)
if not handle:
raise ctypes.WinError()
return handle
class CaptureOutput(object):
"""Captures output from the specified file descriptor, and tees it into another
file descriptor while generating DAP "output" events for it.
"""
def __init__(self, category, fd, tee_fd, encoding):
assert category not in _captured_output
log.info("Capturing {0} of debuggee process with PID={1}.", category, pid)
self.category = category
self._fd = fd
self._tee_fd = tee_fd
# Do this here instead of _worker(), so that exceptions propagate to caller.
self._ide = channels.Channels().ide()
try:
self._decoder = codecs.getincrementaldecoder(encoding)(errors="replace")
except LookupError:
self._decoder = None
log.warning(
"Unable to capture {0} - unknown encoding {1!r}", category, encoding
)
else:
_captured_output[category] = self
self._worker_thread = threading.Thread(target=self._worker, name=category)
self._worker_thread.start()
def __del__(self):
fd = self._fd
if fd is not None:
try:
os.close(fd)
except Exception:
pass
def _send_output_event(self, s, final=False):
if self._decoder is None:
return
s = self._decoder.decode(s, final=final)
if len(s) == 0:
return
try:
self._ide.send_event("output", {"category": self.category, "output": s})
except Exception:
pass # channel to IDE is already closed
def _worker(self):
while self._fd is not None:
try:
s = os.read(self._fd, 0x1000)
except Exception:
break
size = len(s)
if size == 0:
break
# Tee the output first, before sending the "output" event.
i = 0
while i < size:
written = os.write(self._tee_fd, s[i:])
i += written
if not written:
# This means that the output stream was closed from the other end.
# Do the same to the debuggee, so that it knows as well.
os.close(self._fd)
self._fd = None
break
self._send_output_event(s)
# Flush any remaining data in the incremental decoder.
self._send_output_event(b"", final=True)
def wait_for_remaining_output(self):
log.info(
"Waiting for remaining {0} of debuggee process with PID={1}.",
self.category,
pid,
)
self._worker_thread.join()
def is_capturing_output(category):
"""Returns True if the specified output category is being captured directly,
and DAP "output" events are being generated for it.
Category corresponds to "category" property in the "output" event - "stdout",
"stderr" etc.
"""
return category in _captured_output
def wait_for_remaining_output():
"""Waits for all remaining output to be captured and propagated.
"""
for co in _captured_output.values():
co.wait_for_remaining_output()
def start_process_pid_server():
listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
listener.bind(("127.0.0.1", 0))
listener.listen(1)
host, port = listener.getsockname()
log.info("Adapter waiting for connection from launcher on {0}:{1}...", host, port)
def _worker():
try:
sock, (l_host, l_port) = listener.accept()
finally:
listener.close()
log.info("Launcher connection accepted from {0}:{1}.", l_host, l_port)
try:
data = sock.makefile().read()
finally:
sock.close()
global pid
pid = -1 if data == b"" else int(data)
_got_pid.set()
log.info("Debuggee process Id received: {0}", pid)
wait_thread = threading.Thread(target=_worker, name="Process Pid Server")
wait_thread.daemon = True
wait_thread.start()
return port

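For context, the PID handshake that this (now deleted) module implemented is one-directional: the adapter listens on an ephemeral 127.0.0.1 port, passes it to the spawned process via --internal-port / PTVSD_PID_SERVER_PORT, and reads a decimal PID until EOF. A sketch of the writer side, which is not shown in this diff (assumed, based on the reader above):

import os
import socket

# Hypothetical writer side of the removed PID handshake; `pid_server_port` is
# the port received via --internal-port or PTVSD_PID_SERVER_PORT.
sock = socket.create_connection(("127.0.0.1", pid_server_port))
try:
    sock.sendall(str(os.getpid()).encode("ascii"))  # adapter reads until EOF, then int()
finally:
    sock.close()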
src/ptvsd/adapter/ide.py (new file, 290 lines)

@@ -0,0 +1,290 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
import platform
import ptvsd
from ptvsd.common import json, log, messaging
from ptvsd.common.compat import unicode
from ptvsd.adapter import components
class IDE(components.Component):
"""Handles the IDE side of a debug session."""
message_handler = components.Component.message_handler
class Capabilities(components.Capabilities):
PROPERTIES = {
"supportsVariableType": False,
"supportsVariablePaging": False,
"supportsRunInTerminalRequest": False,
"supportsMemoryReferences": False,
}
class Expectations(components.Capabilities):
PROPERTIES = {
"locale": "en-US",
"linesStartAt1": True,
"columnsStartAt1": True,
"pathFormat": json.enum("path"), # we don't support "uri"
}
def __init__(self, session, stream):
super(IDE, self).__init__(session, stream)
self.client_id = None
"""ID of the connecting client. This can be 'test' while running tests."""
self._initialize_request = None
"""The "initialize" request as received from the IDE, to propagate to the
server later."""
self._deferred_events = []
"""Deferred events from the launcher and the server that must be propagated
only if and when the "launch" or "attach" response is sent.
"""
assert not session.ide
session.ide = self
self.channel.send_event(
"output",
{
"category": "telemetry",
"output": "ptvsd.adapter",
"data": {"version": ptvsd.__version__},
},
)
def propagate_after_start(self, event):
# pydevd starts sending events as soon as we connect, but the IDE doesn't
# expect to see any until it receives the response to "launch" or "attach"
# request. If IDE is not ready yet, save the event instead of propagating
# it immediately.
if self._deferred_events is not None:
self._deferred_events.append(event)
log.debug("Propagation deferred.")
else:
self.ide.channel.propagate(event)
def _propagate_deferred_events(self):
log.debug("Propagating deferred events to {0}...", self.ide)
for event in self._deferred_events:
log.debug("Propagating deferred {0}", event.describe())
self.ide.channel.propagate(event)
log.info("All deferred events propagated to {0}.", self.ide)
self._deferred_events = None
# Generic event handler. There are no specific handlers for IDE events, because
# there are no events from the IDE in DAP - but we propagate them if we can, in
# case some events appear in future protocol versions.
@message_handler
def event(self, event):
if self.server:
self.server.channel.propagate(event)
# Generic request handler, used if there's no specific handler below.
@message_handler
def request(self, request):
return self.server.channel.delegate(request)
@message_handler
def initialize_request(self, request):
if self._initialize_request is not None:
raise request.isnt_valid("Session is already initialized")
self.client_id = request("clientID", "")
self.capabilities = self.Capabilities(self, request)
self.expectations = self.Expectations(self, request)
self._initialize_request = request
return {
"supportsCompletionsRequest": True,
"supportsConditionalBreakpoints": True,
"supportsConfigurationDoneRequest": True,
"supportsDebuggerProperties": True,
"supportsDelayedStackTraceLoading": True,
"supportsEvaluateForHovers": True,
"supportsExceptionInfoRequest": True,
"supportsExceptionOptions": True,
"supportsHitConditionalBreakpoints": True,
"supportsLogPoints": True,
"supportsModulesRequest": True,
"supportsSetExpression": True,
"supportsSetVariable": True,
"supportsValueFormattingOptions": True,
"supportsTerminateDebuggee": True,
"supportsGotoTargetsRequest": True,
"exceptionBreakpointFilters": [
{"filter": "raised", "label": "Raised Exceptions", "default": False},
{"filter": "uncaught", "label": "Uncaught Exceptions", "default": True},
],
}
# Common code for "launch" and "attach" request handlers.
#
# See https://github.com/microsoft/vscode/issues/4902#issuecomment-368583522
# for the sequence of requests and events necessary to orchestrate the start.
def _start_message_handler(f):
f = components.Component.message_handler(f)
def handle(self, request):
assert request.is_request("launch", "attach")
if self._initialize_request is None:
raise request.isnt_valid("Session is not initialized yet")
if self.launcher or self.server:
raise request.isnt_valid("Session is already started")
self.session.no_debug = request("noDebug", json.default(False))
self.session.debug_options = set(
request("debugOptions", json.array(unicode))
)
f(self, request)
if self.server:
self.server.initialize(self._initialize_request)
self._initialize_request = None
# pydevd doesn't send "initialized", and responds to the start request
# immediately, without waiting for "configurationDone". If it changes
# to conform to the DAP spec, we'll need to defer waiting for response.
self.server.channel.delegate(request)
if self.session.no_debug:
request.respond({})
self._propagate_deferred_events()
return
if {"WindowsClient", "Windows"} & self.session.debug_options:
client_os_type = "WINDOWS"
elif {"UnixClient", "UNIX"} & self.session.debug_options:
client_os_type = "UNIX"
else:
client_os_type = "WINDOWS" if platform.system() == "Windows" else "UNIX"
self.server.channel.request(
"setDebuggerProperty",
{
"skipSuspendOnBreakpointException": ("BaseException",),
"skipPrintBreakpointException": ("NameError",),
"multiThreadsSingleNotification": True,
"ideOS": client_os_type,
},
)
# Let the IDE know that it can begin configuring the adapter.
self.channel.send_event("initialized")
self._start_request = request
return messaging.NO_RESPONSE # will respond on "configurationDone"
return handle
@_start_message_handler
def launch_request(self, request):
sudo = request("sudo", json.default("Sudo" in self.session.debug_options))
if sudo:
if platform.system() == "Windows":
raise request.cant_handle('"sudo":true is not supported on Windows.')
else:
if "Sudo" in self.session.debug_options:
raise request.isnt_valid(
'"sudo":false and "debugOptions":["Sudo"] are mutually exclusive'
)
# Launcher doesn't use the command line at all, but we pass the arguments so
# that they show up in the terminal if we're using "runInTerminal".
if "program" in request:
args = request("program", json.array(unicode, vectorize=True, size=(1,)))
elif "module" in request:
args = ["-m"] + request(
"module", json.array(unicode, vectorize=True, size=(1,))
)
elif "code" in request:
args = ["-c"] + request(
"code", json.array(unicode, vectorize=True, size=(1,))
)
args += request("args", json.array(unicode))
console = request(
"console",
json.enum(
"internalConsole",
"integratedTerminal",
"externalTerminal",
optional=True,
),
)
console_title = request("consoleTitle", json.default("Python Debug Console"))
self.session.spawn_debuggee(request, sudo, args, console, console_title)
@_start_message_handler
def attach_request(self, request):
if self.session.no_debug:
raise request.isnt_valid('"noDebug" is not supported for "attach"')
pid = request("processId", int, optional=True)
if pid == ():
host = request("host", "127.0.0.1")
port = request("port", int)
self.session.connect_to_server((host, port))
else:
ptvsd_args = request("ptvsdArgs", json.array(unicode))
self.session.inject_server(pid, ptvsd_args)
@message_handler
def configurationDone_request(self, request):
if self._start_request is None:
raise request.cant_handle(
'"configurationDone" is only allowed during handling of a "launch" '
'or an "attach" request'
)
try:
request.respond(self.server.channel.delegate(request))
finally:
self._start_request.respond({})
self._start_request = None
self._propagate_deferred_events()
@message_handler
def pause_request(self, request):
request.arguments["threadId"] = "*"
return self.server.channel.delegate(request)
@message_handler
def continue_request(self, request):
request.arguments["threadId"] = "*"
return self.server.channel.delegate(request)
@message_handler
def ptvsd_systemInfo_request(self, request):
result = {"ptvsd": {"version": ptvsd.__version__}}
if self.server:
try:
pydevd_info = self.server.channel.request("pydevdSystemInfo")
except Exception:
# If the server has already disconnected, or couldn't handle it,
# report what we've got.
pass
else:
result.update(pydevd_info)
return result
@message_handler
def terminate_request(self, request):
self.session.finalize('IDE requested "terminate"', terminate_debuggee=True)
return {}
@message_handler
def disconnect_request(self, request):
self.session.finalize(
'IDE requested "disconnect"',
request("terminateDebuggee", json.default(bool(self.launcher))),
)
return {}

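For reference, the start sequence these handlers orchestrate (per the VS Code issue linked above) is roughly:

1. The IDE sends "initialize"; the adapter replies with its own capabilities and stashes the request to forward to the server later.
2. The IDE sends "launch" or "attach"; the adapter starts the debuggee via the launcher and/or connects to (or injects) the debug server, forwards "initialize" to it, and sends the "initialized" event (in "noDebug" mode it responds to "launch" right away instead).
3. The IDE sends breakpoint and exception configuration, then "configurationDone".
4. The adapter responds to "configurationDone", then to the original "launch"/"attach", and only then flushes the deferred launcher and server events.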

@@ -0,0 +1,49 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
from ptvsd.adapter import components
class Launcher(components.Component):
"""Handles the launcher side of a debug session."""
message_handler = components.Component.message_handler
def __init__(self, session, stream):
super(Launcher, self).__init__(session, stream)
self.pid = None
"""Process ID of the debuggee process, as reported by the launcher."""
self.exit_code = None
"""Exit code of the debuggee process."""
assert not session.launcher
session.launcher = self
@message_handler
def process_event(self, event):
self.pid = event("systemProcessId", int)
assert self.session.pid is None
self.session.pid = self.pid
self.ide.propagate_after_start(event)
@message_handler
def output_event(self, event):
self.ide.propagate_after_start(event)
@message_handler
def exited_event(self, event):
self.exit_code = event("exitCode", int)
# We don't want to tell the IDE about this just yet, because it will then
# want to disconnect, and the launcher might still be waiting for keypress
# (if wait-on-exit was enabled). Instead, we'll report the event when we
# receive "terminated" from the launcher, right before it exits.
@message_handler
def terminated_event(self, event):
self.ide.channel.send_event("exited", {"exitCode": self.exit_code})
self.channel.close()

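The launcher's side of this exchange is not in this excerpt; presumably it looks roughly like the sketch below (names assumed), which is why the adapter above withholds the IDE-facing "exited" event until "terminated" arrives:

# Hypothetical launcher-side ordering corresponding to the handlers above:
channel.send_event("exited", {"exitCode": exit_code})  # debuggee has exited
wait_for_keypress_if_requested()                        # e.g. wait-on-normal/abnormal exit
channel.send_event("terminated", {})                    # launcher itself is about to exit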

@@ -1,555 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
import functools
import platform
import ptvsd
from ptvsd.common import json, log, messaging, singleton
from ptvsd.common.compat import unicode
from ptvsd.adapter import channels, debuggee, contract, options, state
WAIT_FOR_PID_TIMEOUT = 10
class _Shared(singleton.ThreadSafeSingleton):
"""Global state shared between IDE and server handlers, other than contracts.
"""
# Only attributes that are set by IDEMessages and marked as readonly before
# connecting to the server can go in here.
threadsafe_attrs = {"start_method", "terminate_on_disconnect", "client_id"}
start_method = None
"""Either "launch" or "attach", depending on the request used."""
terminate_on_disconnect = True
"""Whether the debuggee process should be terminated on disconnect."""
client_id = None
"""ID of the connecting client. This can be 'test' while running tests."""
class Messages(singleton.Singleton):
# Misc helpers that are identical for both IDEMessages and ServerMessages.
# Shortcut for the IDE channel. This one does not check for None, because in the
# normal stdio channel scenario, the channel will never disconnect. The debugServer
# scenario is for testing purposes only, so it's okay to crash if IDE suddenly
# disconnects in that case.
@property
def _ide(self):
return _channels.ide()
@property
def _server(self):
"""Raises MessageHandlingError if the server is not available.
To test whether it is available or not, use _channels.server() instead,
following the guidelines in its docstring.
"""
server = _channels.server()
if server is None:
raise messaging.Message.isnt_valid(
"Connection to debug server is not established yet"
)
return server
# Specifies the allowed adapter states for a message handler - if the corresponding
# message is received in a state that is not listed, the handler is not invoked.
# If the message is a request, a failed response is returned.
@staticmethod
def _only_allowed_while(*states):
def decorate(handler):
@functools.wraps(handler)
def handle_if_allowed(self, message):
current_state = state.current()
if current_state in states:
return handler(self, message)
raise message.isnt_valid(
"{0} is not allowed in adapter state {1!r}.",
message.describe(),
current_state,
)
return handle_if_allowed
return decorate
class IDEMessages(Messages):
"""Message handlers and the associated global state for the IDE channel.
"""
_only_allowed_while = Messages._only_allowed_while
# The contents of the "initialize" response that is sent from the adapter to the IDE,
# and is expected to match what the debug server sends to the adapter once connected.
_INITIALIZE_RESULT = {
"supportsCompletionsRequest": True,
"supportsConditionalBreakpoints": True,
"supportsConfigurationDoneRequest": True,
"supportsDebuggerProperties": True,
"supportsDelayedStackTraceLoading": True,
"supportsEvaluateForHovers": True,
"supportsExceptionInfoRequest": True,
"supportsExceptionOptions": True,
"supportsHitConditionalBreakpoints": True,
"supportsLogPoints": True,
"supportsModulesRequest": True,
"supportsSetExpression": True,
"supportsSetVariable": True,
"supportsValueFormattingOptions": True,
"supportTerminateDebuggee": True,
"supportsGotoTargetsRequest": True,
"exceptionBreakpointFilters": [
{"filter": "raised", "label": "Raised Exceptions", "default": False},
{"filter": "uncaught", "label": "Uncaught Exceptions", "default": True},
],
}
# Until the server message loop is started, this isn't really shared, so we can simplify
# synchronization by keeping it exclusive until then. This way, all attributes
# that are computed during initialization and never change after don't need to be
# synchronized at all.
_shared = _Shared(shared=False)
# Until "launch" or "attach", there's no debug server yet, and so we can't propagate
# messages. But they will need to be replayed once we establish connection to server,
# so store them here until then. After all messages are replayed, it is set to None.
_initial_messages = []
# "launch" or "attach" request that started debugging.
_start_request = None
# "noDebug" flag is set when user selects run without debugging.
_no_debug = False
# A decorator to add the message to initial_messages if needed before handling it.
# Must be applied to the handler for every message that can be received before
# connection to the debug server can be established while handling attach/launch,
# and that must be replayed to the server once it is established.
def _replay_to_server(handler):
@functools.wraps(handler)
def store_and_handle(self, message):
if self._initial_messages is not None:
self._initial_messages.append(message)
return handler(self, message)
return store_and_handle
# Generic event handler. There are no specific handlers for IDE events, because
# there are no events from the IDE in DAP - but we propagate them if we can, in
# case some events appear in future protocol versions.
@_replay_to_server
def event(self, event):
server = _channels.server()
if server is not None:
server.propagate(event)
# Generic request handler, used if there's no specific handler below.
def request(self, request):
server = _channels.server()
if not self._no_debug or server is not None:
return self._server.delegate(request)
if self._no_debug:
raise request.isnt_valid("when running without debugger.")
if not server:
raise request.isnt_valid("when debug server not ready.")
@_replay_to_server
@_only_allowed_while("starting")
def initialize_request(self, request):
contract.ide.parse(request)
state.change("initializing")
self._shared.client_id = request.arguments.get("clientID", "vscode")
_Shared.readonly_attrs.add("client_id")
return self._INITIALIZE_RESULT
# Handles various attributes common to both "launch" and "attach".
def _debug_config(self, request):
assert request.is_request("launch", "attach")
self._shared.start_method = request.command
_Shared.readonly_attrs.add("start_method")
# We're about to connect to the server and start the message loop for its
# handlers, so _shared is actually going to be shared from now on.
self._shared.share()
# TODO: handle "logToFile". Maybe also "trace" (to Debug Output) like Node.js?
pass
@_replay_to_server
@_only_allowed_while("initializing")
def launch_request(self, request):
self._debug_config(request)
debuggee.spawn_and_connect(request)
return self._configure(request)
@_replay_to_server
@_only_allowed_while("initializing")
def attach_request(self, request):
if request("noDebug", json.default(False)):
raise request.isnt_valid('"noDebug" is not valid for Attach')
self._shared.terminate_on_disconnect = False
_Shared.readonly_attrs.add("terminate_on_disconnect")
self._debug_config(request)
if "processId" in request:
debuggee.attach_by_pid(request)
else:
options.host = request("host", options.host)
options.port = request("port", options.port)
_channels.connect_to_server(address=(options.host, options.port))
return self._configure(request)
def _set_debugger_properties(self, request):
debug_options = set(request("debugOptions", json.array(unicode)))
client_os_type = None
if "WindowsClient" in debug_options or "WINDOWS" in debug_options:
client_os_type = "WINDOWS"
elif "UnixClient" in debug_options or "UNIX" in debug_options:
client_os_type = "UNIX"
else:
client_os_type = "WINDOWS" if platform.system() == "Windows" else "UNIX"
try:
self._server.request(
"setDebuggerProperty",
arguments={
"skipSuspendOnBreakpointException": ("BaseException",),
"skipPrintBreakpointException": ("NameError",),
"multiThreadsSingleNotification": True,
"ideOS": client_os_type,
},
)
except messaging.MessageHandlingError as exc:
exc.propagate(request)
# Handles the configuration request sequence for "launch" or "attach", from when
# the "initialized" event is sent, to when "configurationDone" is received; see
# https://github.com/microsoft/vscode/issues/4902#issuecomment-368583522
def _configure(self, request):
assert request.is_request("launch", "attach")
self._no_debug = request("noDebug", json.default(False))
if not self._no_debug:
log.debug("Replaying previously received messages to server.")
assert len(self._initial_messages)
initialize = self._initial_messages.pop(0)
assert initialize.is_request("initialize")
# We want to make sure that no other server message handler can execute until
# we receive and parse the response to "initialize", to avoid race conditions
# with those handlers accessing contract.server. Thus, we send the request and
# register the callback first, and only then start the server message loop.
server_initialize = self._server.propagate(initialize)
server_initialize.on_response(
lambda response: contract.server.parse(response)
)
self._server.start()
server_initialize.wait_for_response()
for msg in self._initial_messages:
# TODO: validate server response to ensure it matches our own earlier.
self._server.propagate(msg)
log.debug("Finished replaying messages to server.")
self._initial_messages = None
self._start_request = request
if request.command == "launch":
# Wait until we have the debuggee PID - we either know it already because we
# have launched it directly, or we'll find out eventually from the "process"
# server event. Either way, we need to know the PID before we can tell the
# server to start debugging, because we need to be able to kill the debuggee
# process if anything goes wrong.
#
# However, we can't block forever, because the debug server can also crash
# before it had a chance to send the event - so wake up periodically, and
# check whether server channel is still alive.
if not debuggee.wait_for_pid(WAIT_FOR_PID_TIMEOUT):
if not self._no_debug and _channels.server() is None:
raise request.cant_handle("Debug server disconnected unexpectedly.")
if not self._no_debug:
self._set_debugger_properties(request)
# Let the IDE know that it can begin configuring the adapter.
state.change("configuring")
self._ide.send_event("initialized")
return messaging.NO_RESPONSE # will respond on "configurationDone"
else:
request.respond({})
state.change("running_nodebug")
# No server to send the "process" event, so do that here.
self._ide.send_event(
"process",
{
"systemProcessId": debuggee.pid,
"name": debuggee.process_name,
"isLocalProcess": True,
"startMethod": request.command,
},
)
@_only_allowed_while("configuring")
def configurationDone_request(self, request):
assert self._start_request is not None
result = self._server.delegate(request)
state.change("running")
ServerMessages().release_events()
request.respond(result)
self._start_request.respond({})
def _disconnect_or_terminate_request(self, request):
assert request.is_request("disconnect") or request.is_request("terminate")
if request("restart", json.default(False)):
raise request.isnt_valid("Restart is not supported")
terminate = (request.command == "terminate") or request(
"terminateDebuggee", json.default(self._shared.terminate_on_disconnect)
)
server = _channels.server()
server_exc = None
terminate_requested = False
result = {}
try:
state.change("shutting_down")
except state.InvalidStateTransition:
# Can happen if the IDE or the server disconnect while we were handling
# this. If it was the server, we want to move on so that we can report
# to the IDE before exiting. If it was the IDE, disconnect() handler has
# already dealt with the server, and there isn't anything else we can do.
pass
else:
if server is not None:
try:
if not self._no_debug:
result = server.delegate(request)
else:
result = {}
except messaging.MessageHandlingError as exc:
# If the server was there, but failed to handle the request, we want
# to propagate that failure back to the IDE - but only after we have
# recorded the state transition and terminated the debuggee if needed.
server_exc = exc
except Exception:
# The server might have already disconnected - this is not an error.
pass
else:
terminate_requested = terminate
if terminate:
# If we asked the server to terminate, give it some time to do so before
# we kill the debuggee process. Otherwise, just kill it immediately.
debuggee.terminate(5 if terminate_requested else 0)
if server_exc is None:
return result
else:
server_exc.propagate(request)
disconnect_request = _disconnect_or_terminate_request
terminate_request = _disconnect_or_terminate_request
@_only_allowed_while("running")
def pause_request(self, request):
request.arguments["threadId"] = "*"
self._server.delegate(request)
@_only_allowed_while("running")
def continue_request(self, request):
request.arguments["threadId"] = "*"
self._server.delegate(request)
return {"allThreadsContinued": True}
@_only_allowed_while("configuring", "running")
def ptvsd_systemInfo_request(self, request):
result = {"ptvsd": {"version": ptvsd.__version__}}
server = _channels.server()
if server is not None:
try:
pydevd_info = server.request("pydevdSystemInfo")
except Exception:
# If the server has already disconnected, or couldn't handle it,
# report what we've got.
pass
else:
result.update(pydevd_info)
return result
# Adapter's stdout was closed by IDE.
def disconnect(self):
terminate_on_disconnect = self._shared.terminate_on_disconnect
try:
try:
state.change("shutting_down")
except state.InvalidStateTransition:
# Either we have already received "disconnect" or "terminate" from the
# IDE and delegated it to the server, or the server dropped connection.
# Either way, everything that needed to be done is already done.
return
else:
# Can happen if the IDE was force-closed or crashed.
log.warning(
'IDE disconnected without sending "disconnect" or "terminate".'
)
server = _channels.server()
if server is None:
if terminate_on_disconnect:
# It happened before we connected to the server, so we cannot gracefully
# terminate the debuggee. Force-kill it immediately.
debuggee.terminate()
return
# Try to shut down the server gracefully, even though the adapter wasn't.
try:
server.send_request("disconnect", {
"terminateDebuggee": terminate_on_disconnect,
})
except Exception:
# The server might have already disconnected as well, or it might fail
# to handle the request. But we can't report failure to the IDE at this
# point, and it's already logged, so just move on.
pass
finally:
if terminate_on_disconnect:
# If debuggee is still there, give it some time to terminate itself,
# then force-kill. Since the IDE is gone already, and nobody is waiting
# for us to respond, there's no rush.
debuggee.terminate(after=60)
class ServerMessages(Messages):
"""Message handlers and the associated global state for the server channel.
"""
_only_allowed_while = Messages._only_allowed_while
_shared = _Shared()
_saved_messages = []
_hold_messages = True
# Generic request handler, used if there's no specific handler below.
def request(self, request):
# Do not delegate requests from the server by default. There is a security
# boundary between the server and the adapter, and we cannot trust arbitrary
# requests sent over that boundary, since they may contain arbitrary code
# that the IDE will execute - e.g. "runInTerminal". The adapter must only
# propagate requests that it knows are safe.
raise request.isnt_valid(
"Requests from the debug server to the IDE are not allowed."
)
def _hold_or_propagate(self, event):
with self._lock:
if self._hold_messages:
self._saved_messages.append(event)
else:
self._ide.propagate(event)
# Generic event handler, used if there's no specific handler below.
def event(self, event):
# NOTE: This is temporary until debug server is updated to follow
# DAP spec so we don't receive debugger events before configuration
# done is finished.
self._hold_or_propagate(event)
def initialized_event(self, event):
# NOTE: This should be suppressed from server, if we want to remove
# this then we should ensure that debug server follows DAP spec and
# also remove the 'initialized' event sent from IDE messages.
pass
@_only_allowed_while("initializing")
def process_event(self, event):
self._hold_or_propagate(event)
@_only_allowed_while("running")
def continued_event(self, event):
if self._shared.client_id not in ("visualstudio", "vsformac"):
# In visual studio any step/continue action already marks all the
# threads as running until a suspend, so, the continued is not
# needed (and can in fact break the UI in some cases -- see:
# https://github.com/microsoft/ptvsd/issues/1358).
# It is however needed in vscode -- see:
# https://github.com/microsoft/ptvsd/issues/1530.
self._ide.propagate(event)
@_only_allowed_while("configuring", "running")
def output_event(self, event):
category = event("category", "console")
if debuggee.is_capturing_output(category):
self._ide.propagate(event)
@_only_allowed_while("running")
def ptvsd_subprocess_event(self, event):
sub_pid = event("processId", int)
try:
debuggee.register_subprocess(sub_pid)
except Exception as exc:
raise event.cant_handle("{0}", exc)
self._ide.propagate(event)
def terminated_event(self, event):
# Do not propagate this, since we'll report our own.
pass
@_only_allowed_while("running")
def exited_event(self, event):
# Make sure that all "output" events are sent before "exited".
debuggee.wait_for_remaining_output()
# For "launch", the adapter will report the event itself by observing the
# debuggee process directly, allowing the exit code to be captured more
# accurately. Thus, there's no need to propagate it in that case.
if self._shared.start_method == "attach":
self._ide.propagate(event)
# Socket was closed by the server.
def disconnect(self):
log.info("Debug server disconnected.")
_channels.close_server()
# The debuggee process should exit shortly after it has disconnected, but just
# in case it gets stuck, don't wait forever, and force-kill it if needed.
debuggee.terminate(after=5)
try:
state.change("shutting_down")
except state.InvalidStateTransition:
# The IDE has either disconnected already, or requested "disconnect".
pass
# Make sure that all "output" events are sent before "terminated".
debuggee.wait_for_remaining_output()
# Let the IDE know that we're not debugging anymore.
self._ide.send_event("terminated")
def release_events(self):
# NOTE: This is temporary until debug server is updated to follow
# DAP spec so we don't receive debugger events before configuration
# done is finished.
with self._lock:
self._hold_messages = False
for e in self._saved_messages:
self._ide.propagate(e)
_channels = channels.Channels()


@ -9,8 +9,5 @@ or configuration files.
"""
host = "127.0.0.1"
"""Default host name for the debug server"""
port = 5678
"""Default port for the debug server"""
log_stderr = False
"""Whether detailed logs are written to stderr."""

src/ptvsd/adapter/server.py Normal file

@ -0,0 +1,146 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
from ptvsd.adapter import components
class Server(components.Component):
"""Handles the debug server side of a debug session."""
message_handler = components.Component.message_handler
class Capabilities(components.Capabilities):
PROPERTIES = {
"supportsCompletionsRequest": False,
"supportsConditionalBreakpoints": False,
"supportsConfigurationDoneRequest": False,
"supportsDataBreakpoints": False,
"supportsDelayedStackTraceLoading": False,
"supportsDisassembleRequest": False,
"supportsEvaluateForHovers": False,
"supportsExceptionInfoRequest": False,
"supportsExceptionOptions": False,
"supportsFunctionBreakpoints": False,
"supportsGotoTargetsRequest": False,
"supportsHitConditionalBreakpoints": False,
"supportsLoadedSourcesRequest": False,
"supportsLogPoints": False,
"supportsModulesRequest": False,
"supportsReadMemoryRequest": False,
"supportsRestartFrame": False,
"supportsRestartRequest": False,
"supportsSetExpression": False,
"supportsSetVariable": False,
"supportsStepBack": False,
"supportsStepInTargetsRequest": False,
"supportsTerminateDebuggee": False,
"supportsTerminateRequest": False,
"supportsTerminateThreadsRequest": False,
"supportsValueFormattingOptions": False,
"exceptionBreakpointFilters": [],
"additionalModuleColumns": [],
"supportedChecksumAlgorithms": [],
}
def __init__(self, session, stream):
super(Server, self).__init__(session, stream)
self.pid = None
"""Process ID of the debuggee process, as reported by the server."""
assert not session.server
session.server = self
def initialize(self, request):
assert request.is_request("initialize")
request = self.channel.propagate(request)
request.wait_for_response()
self.capabilities = self.Capabilities(self, request.response)
# Generic request handler, used if there's no specific handler below.
@message_handler
def request(self, request):
# Do not delegate requests from the server by default. There is a security
# boundary between the server and the adapter, and we cannot trust arbitrary
# requests sent over that boundary, since they may contain arbitrary code
# that the IDE will execute - e.g. "runInTerminal". The adapter must only
# propagate requests that it knows are safe.
raise request.isnt_valid(
"Requests from the debug server to the IDE are not allowed."
)
# Generic event handler, used if there's no specific handler below.
@message_handler
def event(self, event):
self.ide.propagate_after_start(event)
@message_handler
def initialized_event(self, event):
# pydevd doesn't send it, but the adapter will send its own in any case.
pass
@message_handler
def process_event(self, event):
self.pid = event("systemProcessId", int)
if self.launcher:
assert self.session.pid is not None
else:
assert self.session.pid is None
if self.session.pid is not None and self.session.pid != self.pid:
event.cant_handle(
'"process" event mismatch: launcher reported "systemProcessId":{0}, '
'but server reported "systemProcessId":{1}',
self.session.pid,
self.pid,
)
else:
self.session.pid = self.pid
# If there is a launcher, it's handling the process event.
if not self.launcher:
self.ide.propagate_after_start(event)
@message_handler
def continued_event(self, event):
# https://github.com/microsoft/ptvsd/issues/1530
#
# DAP specification says that a step request implies that only the thread on
# which that step occurred is resumed for the duration of the step. However,
# for VS compatibility, pydevd can operate in a mode that resumes all threads
# instead. This is set according to the value of "steppingResumesAllThreads"
# in "launch" or "attach" request, which defaults to true. If explicitly set
# to false, pydevd will only resume the thread that was stepping.
#
# To ensure that the IDE is aware that other threads are getting resumed in
# that mode, pydevd sends a "continued" event with "allThreadsResumed": true.
# when responding to a step request. This ensures correct behavior in VSCode
# and other DAP-conformant clients.
#
# On the other hand, VS does not follow the DAP specification in this regard.
# When it requests a step, it assumes that all threads will be resumed, and
# does not expect to see "continued" events explicitly reflecting that fact.
# If such events are sent regardless, VS behaves erratically. Thus, we have
# to suppress them specifically for VS.
if self.ide.client_id not in ("visualstudio", "vsformac"):
self.ide.propagate_after_start(event)
@message_handler
def output_event(self, event):
# If there is a launcher, it's handling the output.
if not self.launcher:
self.ide.propagate_after_start(event)
@message_handler
def exited_event(self, event):
# If there is a launcher, it's handling the exit code.
if not self.launcher:
self.ide.propagate_after_start(event)
@message_handler
def terminated_event(self, event):
# Do not propagate this, since we'll report our own.
pass
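
The PROPERTIES table above pairs each DAP capability with the default assumed when pydevd's "initialize" response omits it. A minimal sketch of that merge, assuming components.Capabilities (not shown in this diff) behaves like a defaulting dictionary:

    def parse_capabilities(body, properties=Server.Capabilities.PROPERTIES):
        # Keys absent from the response body fall back to the declared defaults.
        return {name: body.get(name, default) for name, default in properties.items()}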


@ -0,0 +1,391 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
import contextlib
import itertools
import os
import subprocess
import sys
import threading
import time
import ptvsd
import ptvsd.launcher
from ptvsd.common import compat, fmt, log, messaging, options as common_options, sockets, util
from ptvsd.adapter import components, ide, launcher, options as adapter_options, server
class Session(util.Observable):
"""A debug session involving an IDE, an adapter, a launcher, and a debug server.
The IDE and the adapter are always present, and at least one of launcher and debug
server is present, depending on the scenario.
"""
INCOMING_CONNECTION_TIMEOUT = 10
_counter = itertools.count(1)
def __init__(self):
super(Session, self).__init__()
self.lock = threading.RLock()
self.id = next(self._counter)
self._changed_condition = threading.Condition(self.lock)
self.ide = components.missing(self, ide.IDE)
"""The IDE component. Always present."""
self.launcher = components.missing(self, launcher.Launcher)
"""The launcher componet. Always present in "launch" sessions, and never
present in "attach" sessions.
"""
self.server = components.missing(self, server.Server)
"""The debug server component. Always present, unless this is a "launch"
session with "noDebug".
"""
self.no_debug = None
"""Whether this is a "noDebug" session."""
self.pid = None
"""Process ID of the debuggee process."""
self.debug_options = {}
"""Debug options as specified by "launch" or "attach" request."""
self.is_finalizing = False
"""Whether the session is inside finalize()."""
self.observers += [lambda *_: self.notify_changed()]
def __str__(self):
return fmt("Session-{0}", self.id)
def __enter__(self):
"""Lock the session for exclusive access."""
self.lock.acquire()
return self
def __exit__(self, exc_type, exc_value, exc_tb):
"""Unlock the session."""
self.lock.release()
def wait_for_completion(self):
self.ide.channel.wait()
if self.launcher:
self.launcher.channel.wait()
if self.server:
self.server.channel.wait()
def notify_changed(self):
with self:
self._changed_condition.notify_all()
def wait_for(self, predicate, timeout=None):
"""Waits until predicate() becomes true.
The predicate is invoked with the session locked. If satisfied, the method
returns immediately. Otherwise, the lock is released (even if it was held
at entry), and the method blocks waiting for some attribute of either self,
self.ide, self.server, or self.launcher to change. On every change, session
is re-locked and predicate is re-evaluated, until it is satisfied.
While the session is unlocked, message handlers for components other than
the one that is waiting can run, but message handlers for that one are still
blocked.
If timeout is not None, the method will unblock and return after that many
seconds regardless of whether the predicate was satisfied. The method returns
False if it timed out, and True otherwise.
"""
def wait_for_timeout():
time.sleep(timeout)
wait_for_timeout.timed_out = True
self.notify_changed()
wait_for_timeout.timed_out = False
if timeout is not None:
thread = threading.Thread(
target=wait_for_timeout, name="Session.wait_for() timeout"
)
thread.daemon = True
thread.start()
with self:
while not predicate():
if wait_for_timeout.timed_out:
return False
self._changed_condition.wait()
return True
def connect_to_ide(self):
"""Sets up a DAP message channel to the IDE over stdio.
"""
stream = messaging.JsonIOStream.from_stdio()
# Make sure that nothing else tries to interfere with the stdio streams
# that are going to be used for DAP communication from now on.
sys.stdout = sys.stderr
sys.stdin = open(os.devnull, "r")
ide.IDE(self, stream)
def connect_to_server(self, address):
"""Sets up a DAP message channel to the server.
The channel is established by connecting to the TCP socket listening on the
specified address.
"""
host, port = address
sock = sockets.create_client()
sock.connect(address)
stream = messaging.JsonIOStream.from_socket(sock)
server.Server(self, stream)
@contextlib.contextmanager
def _accept_connection_from(self, what, address):
"""Sets up a listening socket, accepts an incoming connection on it, sets
up a message stream over that connection, and passes it on to what().
Can be used in a with-statement to obtain the actual address of the listener
socket before blocking on accept()::
with accept_connection_from_server(...) as (host, port):
# listen() returned - listening on (host, port) now
...
# accept() returned - connection established
"""
host, port = address
listener = sockets.create_server(host, port, self.INCOMING_CONNECTION_TIMEOUT)
host, port = listener.getsockname()
log.info(
"{0} waiting for incoming connection from {1} on {2}:{3}...",
self,
what.__name__,
host,
port,
)
yield host, port
try:
sock, (other_host, other_port) = listener.accept()
finally:
listener.close()
log.info(
"{0} accepted incoming connection {1} from {2}:{3}.",
self,
what.__name__,
other_host,
other_port,
)
stream = messaging.JsonIOStream.from_socket(sock, what)
what(self, stream)
def accept_connection_from_ide(self, address):
return self._accept_connection_from(ide.IDE, address)
def accept_connection_from_server(self, address=("127.0.0.1", 0)):
return self._accept_connection_from(server.Server, address)
def _accept_connection_from_launcher(self, address=("127.0.0.1", 0)):
return self._accept_connection_from(launcher.Launcher, address)
def spawn_debuggee(self, request, sudo, args, console, console_title):
cmdline = ["sudo"] if sudo else []
cmdline += [sys.executable, os.path.dirname(ptvsd.launcher.__file__)]
cmdline += args
env = {"PTVSD_SESSION_ID": str(self.id)}
def spawn_launcher():
with self._accept_connection_from_launcher() as (_, launcher_port):
env["PTVSD_LAUNCHER_PORT"] = str(launcher_port)
if common_options.log_dir is not None:
env["PTVSD_LOG_DIR"] = compat.filename(common_options.log_dir)
if adapter_options.log_stderr:
env["PTVSD_LOG_STDERR"] = "debug info warning error"
if console == "internalConsole":
# If we are talking to the IDE over stdio, sys.stdin and sys.stdout are
# redirected to avoid mangling the DAP message stream. Make sure the
# launcher also respects that.
subprocess.Popen(
cmdline,
env=dict(list(os.environ.items()) + list(env.items())),
stdin=sys.stdin,
stdout=sys.stdout,
stderr=sys.stderr,
)
else:
self.ide.capabilities.require("supportsRunInTerminalRequest")
kinds = {
"integratedTerminal": "integrated",
"externalTerminal": "external",
}
self.ide.channel.request(
"runInTerminal",
{
"kind": kinds[console],
"title": console_title,
"args": cmdline,
"env": env,
},
)
self.launcher.channel.delegate(request)
if self.no_debug:
spawn_launcher()
else:
with self.accept_connection_from_server() as (_, server_port):
request.arguments["port"] = server_port
spawn_launcher()
# Don't accept connection from server until launcher sends us the
# "process" event, to avoid a race condition between the launcher
# and the server.
if not self.wait_for(lambda: self.pid is not None, timeout=5):
raise request.cant_handle(
'Session timed out waiting for "process" event from {0}',
self.launcher,
)
def inject_server(self, pid, ptvsd_args):
with self.accept_connection_from_server() as (host, port):
cmdline = [
sys.executable,
compat.filename(os.path.dirname(ptvsd.__file__)),
"--client",
"--host",
host,
"--port",
str(port),
]
cmdline += ptvsd_args
cmdline += ["--pid", str(pid)]
log.info(
"{0} spawning attach-to-PID debugger injector: {1!r}", self, cmdline
)
try:
proc = subprocess.Popen(
cmdline,
bufsize=0,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
# This process will immediately exit after injecting debug server
proc.wait()
except Exception as exc:
log.exception("{0} failed to inject debugger", self)
raise messaging.MessageHandlingError(
fmt("Failed to inject debugger: {0}", exc)
)
if proc.returncode != 0:
log.exception(
"{0} failed to inject debugger with error code {1}",
self,
proc.returncode,
)
raise messaging.MessageHandlingError(
fmt(
"Failed to inject debugger with error code {0}", proc.returncode
)
)
def finalize(self, why, terminate_debuggee=False):
"""Finalizes the debug session.
If the server is present, sends a "disconnect" request to it, with "terminateDebuggee"
set as specified; waits for it to disconnect, allowing any remaining messages
from it to be handled; and closes the server channel.
If the launcher is present, sends a "terminate" request to it, regardless of the
value of terminate_debuggee; waits for it to disconnect, allowing any remaining messages
from it to be handled; and closes the launcher channel.
If the IDE is present, sends "terminated" event to it.
"""
if self.is_finalizing:
return
self.is_finalizing = True
log.info("{0}; finalizing {1}.", why, self)
try:
self._finalize(why, terminate_debuggee)
except Exception:
# Finalization should never fail, and if it does, the session is in an
# indeterminate and likely unrecoverable state, so just fail fast.
log.exception("Fatal error while finalizing {0}", self)
os._exit(1)
log.info("{0} finalized.", self)
def _finalize(self, why, terminate_debuggee):
if self.server and self.server.is_connected:
try:
self.server.channel.request(
"disconnect", {"terminateDebuggee": terminate_debuggee}
)
except Exception:
pass
try:
self.server.channel.close()
except Exception:
log.exception()
# Wait until the server message queue fully drains - there won't be any
# more events after close(), but there may still be pending responses.
log.info("{0} waiting for {1} to disconnect...", self, self.server)
if not self.wait_for(lambda: not self.server.is_connected, timeout=5):
log.warning(
"{0} timed out waiting for {1} to disconnect.", self, self.server
)
if self.launcher and self.launcher.is_connected:
# If there was a server, we just disconnected from it above, which should
# cause the debuggee process to exit - so let's wait for that first.
if self.server:
log.info('{0} waiting for "exited" event...', self)
if not self.wait_for(
lambda: self.launcher.exit_code is not None, timeout=5
):
log.warning('{0} timed out waiting for "exited" event.', self)
# Terminate the debuggee process if it's still alive for any reason -
# whether it's because there was no server to handle graceful shutdown,
# or because the server couldn't handle it for some reason.
try:
self.launcher.channel.request("terminate")
except Exception:
pass
# Wait until the launcher message queue fully drains.
log.info("{0} waiting for {1} to disconnect...", self, self.launcher)
if not self.wait_for(lambda: not self.launcher.is_connected, timeout=5):
log.warning(
"{0} timed out waiting for {1} to disconnect.", self, self.launcher
)
try:
self.launcher.channel.close()
except Exception:
log.exception()
# Tell the IDE that debugging is over, but don't close the channel until it
# tells us to, via the "disconnect" request.
if self.ide.is_connected:
try:
self.ide.channel.send_event("terminated")
except Exception:
pass
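
As a usage note, wait_for() is the primitive that spawn_debuggee() and finalize() above rely on; for example, the wait for the launcher's "process" event boils down to the following (condensed from spawn_debuggee):

    # Block until a handler on another channel's thread sets session.pid,
    # but give up after 5 seconds if the "process" event never arrives.
    if not session.wait_for(lambda: session.pid is not None, timeout=5):
        pass  # timed out; spawn_debuggee() turns this into request.cant_handle()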


@ -1,58 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
"""Tracks the overall state of the adapter, and enforces valid state transitions.
"""
from ptvsd.common import fmt, log, singleton
# Order defines valid transitions.
STATES = (
"starting", # before "initialize" is received
"initializing", # until "initialized" is sent
"configuring", # until "configurationDone" is received
"running", # until "disconnect" or "terminate" is received
"running_nodebug", # until "disconnect" or "terminate" is received
"shutting_down", # until the adapter process exits
)
class InvalidStateTransition(RuntimeError):
pass
class State(singleton.ThreadSafeSingleton):
_state = STATES[0]
@property
@singleton.autolocked_method
def state(self):
"""Returns the current state.
"""
return self._state
@state.setter
@singleton.autolocked_method
def state(self, new_state):
"""Transitions to the new state, or raises InvalidStateTransition if the
state transition is not legal.
"""
state = self._state
if STATES.index(state) >= STATES.index(new_state):
raise InvalidStateTransition(
fmt("Cannot change adapter state from {0!r} to {1!r}", state, new_state)
)
log.debug("Adapter state changed from {0!r} to {1!r}", state, new_state)
self._state = new_state
def current():
return State().state
def change(new_state):
State().state = new_state


@ -1,103 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
__all__ = ["main"]
import os
import os.path
import socket
import subprocess
import sys
# Force absolute path on Python 2.
__file__ = os.path.abspath(__file__)
WAIT_ON_NORMAL_SWITCH = "--wait-on-normal"
WAIT_ON_ABNORMAL_SWITCH = "--wait-on-abnormal"
INTERNAL_PORT_SWITCH = "--internal-port"
_wait_on_normal_exit = False
_wait_on_abnormal_exit = False
_internal_pid_server_port = None
HELP = """Usage: launcher [{normal}] [{abnormal}] <args>
python launcher.py {normal} {abnormal} -- <python args go here>
""".format(
normal=WAIT_ON_NORMAL_SWITCH, abnormal=WAIT_ON_ABNORMAL_SWITCH
)
def main(argv=sys.argv):
try:
process_args = [sys.executable] + list(parse(argv[1:]))
except Exception as ex:
print(HELP + "\nError: " + str(ex), file=sys.stderr)
sys.exit(2)
p = subprocess.Popen(args=process_args)
_send_pid(p.pid)
exit_code = p.wait()
if _wait_on_normal_exit and exit_code == 0:
_wait_for_user()
elif _wait_on_abnormal_exit and exit_code != 0:
_wait_for_user()
sys.exit(exit_code)
def _wait_for_user():
if sys.__stdout__ and sys.__stdin__:
try:
import msvcrt
except ImportError:
sys.__stdout__.write("Press Enter to continue . . . ")
sys.__stdout__.flush()
sys.__stdin__.read(1)
else:
sys.__stdout__.write("Press any key to continue . . . ")
sys.__stdout__.flush()
msvcrt.getch()
def parse_arg(arg, it):
if arg == WAIT_ON_NORMAL_SWITCH:
global _wait_on_normal_exit
_wait_on_normal_exit = True
elif arg == WAIT_ON_ABNORMAL_SWITCH:
global _wait_on_abnormal_exit
_wait_on_abnormal_exit = True
elif arg == INTERNAL_PORT_SWITCH:
global _internal_pid_server_port
_internal_pid_server_port = int(next(it))
else:
raise AssertionError("Invalid argument passed to launcher.")
def parse(argv):
it = iter(argv)
arg = next(it)
while arg != "--":
parse_arg(arg, it)
arg = next(it)
return it
def _send_pid(pid):
assert _internal_pid_server_port is not None
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.connect(("127.0.0.1", _internal_pid_server_port))
sock.sendall(b"%d" % pid)
finally:
sock.close()
if __name__ == "__main__":
main()


@ -4,6 +4,7 @@
from __future__ import absolute_import, print_function, unicode_literals
import contextlib
import functools
import inspect
import io
@ -23,7 +24,7 @@ LEVELS = ("debug", "info", "warning", "error")
stderr = sys.__stderr__
stderr_levels = {"warning", "error"}
stderr_levels = set(os.getenv("PTVSD_LOG_STDERR", "warning error").split())
"""What should be logged to stderr.
"""
@ -68,6 +69,7 @@ def write(level, text):
format_string = "{0}+{1:" + timestamp_format + "}: "
prefix = fmt(format_string, level[0].upper(), t)
text = getattr(_tls, "prefix", "") + text
indent = "\n" + (" " * len(prefix))
output = indent.join(text.split("\n"))
output = prefix + output + "\n\n"
@ -207,6 +209,20 @@ def filename():
return _filename
@contextlib.contextmanager
def prefixed(format_string, *args, **kwargs):
"""Adds a prefix to all messages logged from the current thread for the duration
of the context manager.
"""
prefix = fmt(format_string, *args, **kwargs)
old_prefix = getattr(_tls, "prefix", "")
_tls.prefix = prefix + old_prefix
try:
yield
finally:
_tls.prefix = old_prefix
def describe_environment(header):
import multiprocessing
import sysconfig
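
A short usage sketch of the new prefixed() context manager; the message loop in messaging.py (later in this commit) uses it the same way, with `message` and `handler` standing in for the channel's locals:

    with log.prefixed("[handling {0}]\n", message.describe()):
        log.debug("dispatching")   # carries the prefix
        handler()                  # anything it logs carries the prefix as well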


@ -16,6 +16,7 @@ import contextlib
import functools
import itertools
import os
import socket
import sys
import threading
@ -23,24 +24,39 @@ from ptvsd.common import compat, fmt, json, log
from ptvsd.common.compat import unicode
class NoMoreMessages(EOFError):
"""Indicates that there are no more messages to be read from the stream.
class JsonIOError(IOError):
"""Indicates that a read or write operation on JsonIOStream has failed.
"""
def __init__(self, *args, **kwargs):
stream = kwargs.pop("stream")
args = args if len(args) else ["No more messages"]
super(NoMoreMessages, self).__init__(*args, **kwargs)
cause = kwargs.pop("cause", None)
if not len(args) and cause is not None:
args = [str(cause)]
super(JsonIOError, self).__init__(*args, **kwargs)
self.stream = stream
"""The stream that doesn't have any more messages.
"""The stream that couldn't be read or written.
Set by JsonIOStream.read_json().
Set by JsonIOStream.read_json() and JsonIOStream.write_json().
JsonMessageChannel relies on this value to decide whether a NoMoreMessages
instance that bubbles up to the message loop is related to that loop.
"""
self.cause = cause
"""The underlying exception, if any."""
class NoMoreMessages(JsonIOError, EOFError):
"""Indicates that there are no more messages that can be read from or written
to a stream.
"""
def __init__(self, *args, **kwargs):
args = args if len(args) else ["No more messages"]
super(NoMoreMessages, self).__init__(*args, **kwargs)
class JsonIOStream(object):
"""Implements a JSON value stream over two byte streams (input and output).
@ -99,22 +115,30 @@ class JsonIOStream(object):
return cls(reader, writer, name)
@classmethod
def from_socket(cls, socket, name=None):
def from_socket(cls, sock, name=None):
"""Creates a new instance that sends and receives messages over a socket.
"""
socket.settimeout(None) # make socket blocking
sock.settimeout(None) # make socket blocking
if name is None:
name = repr(socket)
name = repr(sock)
# TODO: investigate switching to buffered sockets; readline() on unbuffered
# sockets is very slow! Although the implementation of readline() itself is
# native code, it calls read(1) in a loop - and that then ultimately calls
# SocketIO.readinto(), which is implemented in Python.
socket_io = socket.makefile("rwb", 0)
socket_io = sock.makefile("rwb", 0)
return cls(socket_io, socket_io, name)
# SocketIO.close() doesn't close the underlying socket.
def cleanup():
try:
sock.shutdown(socket.SHUT_RDWR)
except Exception:
pass
sock.close()
def __init__(self, reader, writer, name=None):
return cls(socket_io, socket_io, name, cleanup)
def __init__(self, reader, writer, name=None, cleanup=lambda: None):
"""Creates a new JsonIOStream.
reader must be a BytesIO-like object, from which incoming messages will be
@ -123,6 +147,9 @@ class JsonIOStream(object):
writer must be a BytesIO-like object, into which outgoing messages will be
written by write_json().
cleanup must be a callable; it will be invoked without arguments when the
stream is closed.
reader.readline() must treat "\n" as the line terminator, and must leave "\r"
as is - it must not replace "\r\n" with "\n" automatically, as TextIO does.
"""
@ -133,22 +160,38 @@ class JsonIOStream(object):
self.name = name
self._reader = reader
self._writer = writer
self._is_closing = False
self._cleanup = cleanup
self._closed = False
def close(self):
"""Closes the stream, the reader, and the writer.
"""
self._is_closing = True
# Close the writer first, so that the other end of the connection has its
# message loop waiting on read() unblocked. If there is an exception while
# closing the writer, we still want to try to close the reader - only one
# exception can bubble up, so if both fail, it'll be the one from reader.
if self._closed:
return
self._closed = True
log.debug("Closing {0} message stream", self.name)
try:
self._writer.close()
finally:
if self._reader is not self._writer:
self._reader.close()
try:
# Close the writer first, so that the other end of the connection has
# its message loop waiting on read() unblocked. If there is an exception
# while closing the writer, we still want to try to close the reader -
# only one exception can bubble up, so if both fail, it'll be the one
# from reader.
try:
self._writer.close()
finally:
if self._reader is not self._writer:
self._reader.close()
finally:
self._cleanup()
except Exception:
# On Python 2, close() will raise an exception if there is a concurrent
# read() or write(), which is a common and expected occurrence with
# JsonMessageChannel, so don't even bother logging it.
if sys.version_info >= (3,):
raise log.exception("Error while closing {0} message stream", self.name)
def _log_message(self, dir, data, logger=log.debug):
format_string = "{0} {1} " + (
@ -265,6 +308,11 @@ class JsonIOStream(object):
Value is written as encoded by encoder.encode().
"""
if self._closed:
# Don't log this - it's a common pattern to write to a stream while
# anticipating EOFError from it in case it got closed concurrently.
raise NoMoreMessages(stream=self)
encoder = encoder if encoder is not None else self.json_encoder_factory()
writer = self._writer
@ -294,8 +342,9 @@ class JsonIOStream(object):
break
data_written += written
writer.flush()
except Exception:
raise self._log_message("<--", value, logger=log.exception)
except Exception as exc:
self._log_message("<--", value, logger=log.exception)
raise JsonIOError(stream=self, cause=exc)
self._log_message("<--", value)
@ -368,7 +417,7 @@ class MessageDict(collections.OrderedDict):
if not validate:
validate = lambda x: x
elif isinstance(validate, type) or isinstance(validate, tuple):
validate = json.of_type(validate)
validate = json.of_type(validate, optional=optional)
elif not callable(validate):
validate = json.default(validate)
@ -444,10 +493,11 @@ class Message(object):
return fmt("{0!j}", self.json) if self.json is not None else repr(self)
def describe(self):
"""A brief description of the message that is enough to identify its handler,
but does not include its payload or metadata that uniquely identifies it.
"""A brief description of the message that is enough to identify it.
Examples: 'request "launch"', 'response to request "launch"'.
Examples:
'#1 request "launch" from IDE'
'#2 response to #1 request "launch" from IDE'.
"""
raise NotImplementedError
@ -508,10 +558,11 @@ class Message(object):
cause = None
exc_type, format_string = args[0:2]
args = args[2:]
assert issubclass(exc_type, MessageHandlingError)
silent = kwargs.pop("silent", False)
reason = fmt(format_string, *args, **kwargs)
exc = exc_type(reason, cause) # will log it
exc = exc_type(reason, cause, silent) # will log it
if isinstance(cause, Request):
cause.respond(exc)
@ -573,7 +624,7 @@ class Event(Message):
self.body = body
def describe(self):
return fmt("event {0!j}", self.event)
return fmt("#{0} event {1!j} from {2}", self.seq, self.event, self.channel)
@property
def payload(self):
@ -591,30 +642,27 @@ class Event(Message):
channel = self.channel
handler = channel._get_handler_for("event", self.event)
try:
result = handler(self)
assert result is None, fmt(
"Handler {0} tried to respond to {1}.",
compat.srcnameof(handler),
self.describe(),
)
except MessageHandlingError as exc:
if not exc.applies_to(self):
raise
log.error(
"Handler {0} couldn't handle {1} in channel {2}:\n\n{3}\n\n{4}",
compat.srcnameof(handler),
self.describe(),
self.channel,
str(exc),
self,
)
try:
result = handler(self)
assert result is None, fmt(
"Handler {0} tried to respond to {1}.",
compat.srcnameof(handler),
self.describe(),
)
except MessageHandlingError as exc:
if not exc.applies_to(self):
raise
log.error(
"Handler {0}\ncouldn't handle {1}:\n{2}",
compat.srcnameof(handler),
self.describe(),
str(exc),
)
except Exception:
raise log.exception(
"Handler {0} couldn't handle {1} in channel {2}:\n\n{3}\n\n",
"Handler {0}\ncouldn't handle {1}:",
compat.srcnameof(handler),
self.describe(),
self.channel,
self,
)
@ -681,7 +729,7 @@ class Request(Message):
"""
def describe(self):
return fmt("request {0!j}", self.command)
return fmt("#{0} request {1!j} from {2}", self.seq, self.command, self.channel)
@property
def payload(self):
@ -731,12 +779,10 @@ class Request(Message):
raise
result = exc
log.error(
"Handler {0} couldn't handle {1} in channel {2}:\n\n{3}\n\n{4}",
"Handler {0}\ncouldn't handle {1}:\n{2}",
compat.srcnameof(handler),
self.describe(),
self.channel,
str(exc),
self,
)
if result is NO_RESPONSE:
@ -760,15 +806,20 @@ class Request(Message):
compat.srcnameof(handler),
self.describe(),
)
self.respond(result)
try:
self.respond(result)
except NoMoreMessages:
log.warning(
"Channel was closed before the response from handler {0} to {1} could be sent",
compat.srcnameof(handler),
self.describe(),
)
except Exception:
raise log.exception(
"Handler {0} couldn't handle {1} in channel {2}:\n\n{3}\n\n",
"Handler {0}\ncouldn't handle {1}:",
compat.srcnameof(handler),
self.describe(),
self.channel,
self,
)
@ -783,6 +834,9 @@ class OutgoingRequest(Request):
super(OutgoingRequest, self).__init__(channel, seq, command, arguments)
self._response_handlers = []
def describe(self):
return fmt("#{0} request {1!j} to {2}", self.seq, self.command, self.channel)
def wait_for_response(self, raise_if_failed=True):
"""Waits until a response is received for this request, records the Response
object for it in self.response, and returns response.body.
@ -833,16 +887,22 @@ class OutgoingRequest(Request):
def run_handlers():
for handler in handlers:
try:
handler(response)
except MessageHandlingError as exc:
if not exc.applies_to(self):
raise
# Detailed exception info was already logged by its constructor.
log.error(
"Handler {0} couldn't handle {1}:\n\n{2}",
try:
handler(response)
except MessageHandlingError as exc:
if not exc.applies_to(response):
raise
log.error(
"Handler {0}\ncouldn't handle {1}:\n{2}",
compat.srcnameof(handler),
response.describe(),
str(exc),
)
except Exception:
raise log.exception(
"Handler {0}\ncouldn't handle {1}:",
compat.srcnameof(handler),
self.describe(),
str(exc),
response.describe(),
)
handlers = self._response_handlers[:]
@ -901,7 +961,7 @@ class Response(Message):
"""
def describe(self):
return fmt("response to request {0!j}", self.request.command)
return fmt("#{0} response to {1}", self.seq, self.request.describe())
@property
def payload(self):
@ -966,6 +1026,18 @@ class Response(Message):
)
class Disconnect(Message):
"""A dummy message used to represent disconnect. It's always the last message
received from any channel.
"""
def __init__(self, channel):
super(Disconnect, self).__init__(channel, None)
def describe(self):
return fmt("disconnect from {0}", self.channel)
class MessageHandlingError(Exception):
"""Indicates that a message couldn't be handled for some reason.
@ -1067,7 +1139,7 @@ class MessageHandlingError(Exception):
"""Propagates this error, raising a new instance of the same class with the
same reason, but a different cause.
"""
raise type(self)(self.reason, new_cause)
raise type(self)(self.reason, new_cause, silent=True)
class InvalidMessageError(MessageHandlingError):
@ -1324,7 +1396,7 @@ class JsonMessageChannel(object):
assert not len(self._sent_requests)
self._enqueue_handlers("disconnect", self._handle_disconnect)
self._enqueue_handlers(Disconnect(self), self._handle_disconnect)
self.close()
_message_parsers = {
@ -1391,8 +1463,7 @@ class JsonMessageChannel(object):
def _enqueue_handlers(self, what, *handlers):
"""Enqueues handlers for _run_handlers() to run.
`what` describes what is being handled, and is used for logging purposes.
Normally it's a Message instance, but it can be anything printable.
`what` is the Message being handled, and is used for logging purposes.
If the background thread with _run_handlers() isn't running yet, starts it.
"""
@ -1458,14 +1529,13 @@ class JsonMessageChannel(object):
if closed and handler in (Event._handle, Request._handle):
continue
try:
handler()
except Exception:
log.exception(
"Fatal error in channel {0} while handling {1}:", self, what
)
self.close()
os._exit(1)
with log.prefixed("[handling {0}]\n", what.describe()):
try:
handler()
except Exception:
# It's already logged by the handler, so just fail fast.
self.close()
os._exit(1)
def _get_handler_for(self, type, name):
"""Returns the handler for a message of a given type.
@ -1479,7 +1549,7 @@ class JsonMessageChannel(object):
raise AttributeError(
fmt(
"channel {0} has no handler for {1} {2!r}",
"Channel {0} has no handler for {1} {2!r}",
compat.srcnameof(self.handlers),
type,
name,
@ -1491,12 +1561,11 @@ class JsonMessageChannel(object):
try:
handler()
except Exception:
log.exception(
"Handler {0} couldn't handle disconnect in channel {1}:",
raise log.exception(
"Handler {0}\ncouldn't handle disconnect from {1}:",
compat.srcnameof(handler),
self.channel,
self,
)
os._exit(1)
class MessageHandlers(object):


@ -31,3 +31,17 @@ def evaluate(code, path=__file__, mode="eval"):
# We use the path internally to skip exception inside the debugger.
expr = compile(code, path, "eval")
return eval(expr, {}, sys.modules)
class Observable(object):
"""An object with change notifications."""
def __init__(self):
self.observers = []
def __setattr__(self, name, value):
try:
return super(Observable, self).__setattr__(name, value)
finally:
for ob in self.observers:
ob(self, name)
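
A small sketch of the notification contract: every attribute assignment on an Observable invokes the observers, which is what lets Session.wait_for() re-check its predicate whenever session state changes (the Demo class is hypothetical):

    class Demo(Observable):
        pass

    demo = Demo()
    demo.observers.append(lambda obj, name: print("changed:", name))
    demo.value = 42   # prints "changed: value"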


@ -0,0 +1,12 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
__all__ = []
import os.path
# Force absolute path on Python 2.
__file__ = os.path.abspath(__file__)


@ -0,0 +1,72 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, division, print_function, unicode_literals
__all__ = ["main"]
import locale
import os
import sys
# WARNING: ptvsd and submodules must not be imported on top level in this module,
# and should be imported locally inside main() instead.
# Force absolute path on Python 2.
__file__ = os.path.abspath(__file__)
def main():
from ptvsd.common import log
from ptvsd.launcher import adapter
log.filename_prefix = "ptvsd.launcher"
log.to_file()
log.describe_environment("ptvsd.launcher startup environment:")
def option(name, type, *args):
try:
return type(os.environ.pop(name, *args))
except Exception:
raise log.exception("Error parsing {0!r}:", name)
session_id = option("PTVSD_SESSION_ID", int)
launcher_port = option("PTVSD_LAUNCHER_PORT", int)
adapter.connect(session_id, launcher_port)
adapter.channel.wait()
if __name__ == "__main__":
# ptvsd can also be invoked directly rather than via -m. In this case, the first
# entry on sys.path is the one added automatically by Python for the directory
# containing this file. This means that import ptvsd will not work, since we need
# the parent directory of ptvsd/ to be in sys.path, rather than ptvsd/launcher/.
#
# The other issue is that many other absolute imports will break, because they
# will be resolved relative to ptvsd/launcher/ - e.g. `import state` will then try
# to import ptvsd/launcher/state.py.
#
# To fix both, we need to replace the automatically added entry such that it points
# at the parent directory of ptvsd/ instead of ptvsd/launcher/, import ptvsd with that
# in sys.path, and then remove the first entry altogether, so that it doesn't
# affect any further imports we might do. For example, suppose the user did:
#
# python /foo/bar/ptvsd/launcher ...
#
# At the beginning of this script, sys.path will contain "/foo/bar/ptvsd/launcher"
# as the first entry. What we want is to replace it with "/foo/bar", then import
# ptvsd with that in effect, and then remove the replaced entry before any more
# code runs. The imported ptvsd module will remain in sys.modules, and thus all
# future imports of it or its submodules will resolve accordingly.
if "ptvsd" not in sys.modules:
# Do not use dirname() to walk up - this can be a relative path, e.g. ".".
sys.path[0] = sys.path[0] + "/../../"
__import__("ptvsd")
del sys.path[0]
# Load locale settings.
locale.setlocale(locale.LC_ALL, "")
main()
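
Putting the two ends together, the adapter/launcher handshake amounts to two environment variables and a connect-back; condensed and illustrative, not verbatim:

    # Adapter side (Session.spawn_debuggee, earlier in this commit):
    env = {"PTVSD_SESSION_ID": str(session.id), "PTVSD_LAUNCHER_PORT": str(launcher_port)}
    # Launcher side (main() above):
    session_id = int(os.environ.pop("PTVSD_SESSION_ID"))
    launcher_port = int(os.environ.pop("PTVSD_LAUNCHER_PORT"))
    adapter.connect(session_id, launcher_port)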


@ -0,0 +1,148 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, division, print_function, unicode_literals
import functools
import os
import platform
import sys
import ptvsd
from ptvsd.common import compat, fmt, json, messaging, sockets
from ptvsd.common.compat import unicode
from ptvsd.launcher import debuggee
channel = None
"""DAP message channel to the adapter."""
def connect(session_id, launcher_port):
global channel
assert channel is None
sock = sockets.create_client()
sock.connect(("127.0.0.1", launcher_port))
stream = messaging.JsonIOStream.from_socket(sock, fmt("Adapter-{0}", session_id))
channel = messaging.JsonMessageChannel(stream, handlers=Handlers())
channel.start()
class Handlers(object):
def launch_request(self, request):
debug_options = set(request("debugOptions", json.array(unicode)))
# Handling of properties that can also be specified as legacy "debugOptions" flags.
# If a property is explicitly set to false, but the flag is in "debugOptions", treat
# it as an error.
def property_or_debug_option(prop_name, flag_name):
assert prop_name[0].islower() and flag_name[0].isupper()
value = request(prop_name, json.default(flag_name in debug_options))
if value is False and flag_name in debug_options:
raise request.isnt_valid(
'{0!r}:false and "debugOptions":[{1!r}] are mutually exclusive',
prop_name,
flag_name,
)
return value
cmdline = []
if property_or_debug_option("sudo", "Sudo"):
if platform.system() == "Windows":
raise request.cant_handle('"sudo":true is not supported on Windows.')
else:
cmdline += ["sudo"]
# "pythonPath" is a deprecated legacy spelling. If "python" is missing, then try
# the alternative. But if both are missing, the error message should say "python".
python_key = "python"
if python_key in request:
if "pythonPath" in request:
raise request.isnt_valid(
'"pythonPath" is not valid if "python" is specified'
)
elif "pythonPath" in request:
python_key = "pythonPath"
python = request(python_key, json.array(unicode, vectorize=True, size=(1,)))
if not len(python):
python = [compat.filename(sys.executable)]
cmdline += python
if not request("noDebug", json.default(False)):
port = request("port", int)
ptvsd_args = request("ptvsdArgs", json.array(unicode))
cmdline += [
compat.filename(os.path.dirname(ptvsd.__file__)),
"--client",
"--host",
"127.0.0.1",
"--port",
str(port),
] + ptvsd_args
program = module = code = ()
if "program" in request:
program = request("program", json.array(unicode, vectorize=True, size=(1,)))
cmdline += program
process_name = program[0]
if "module" in request:
module = request("module", json.array(unicode, vectorize=True, size=(1,)))
cmdline += ["-m"] + module
process_name = module[0]
if "code" in request:
code = request("code", json.array(unicode, vectorize=True, size=(1,)))
cmdline += ["-c"] + code
process_name = python[0]
num_targets = len([x for x in (program, module, code) if x != ()])
if num_targets == 0:
raise request.isnt_valid(
'either "program", "module", or "code" must be specified'
)
elif num_targets != 1:
raise request.isnt_valid(
'"program", "module", and "code" are mutually exclusive'
)
cmdline += request("args", json.array(unicode))
cwd = request("cwd", unicode, optional=True)
if cwd == ():
# If it's not specified, but we're launching a file rather than a module,
# and the specified path has a directory in it, use that.
cwd = None if program == () else (os.path.dirname(program[0]) or None)
env = os.environ.copy()
env.update(request("env", json.object(unicode)))
redirect_output = "RedirectOutput" in debug_options
if redirect_output:
# sys.stdout buffering must be disabled - otherwise we won't see the output
# at all until the buffer fills up.
env["PYTHONUNBUFFERED"] = "1"
if property_or_debug_option("waitOnNormalExit", "WaitOnNormalExit"):
debuggee.wait_on_exit_predicates.append(lambda code: code == 0)
if property_or_debug_option("waitOnAbnormalExit", "WaitOnAbnormalExit"):
debuggee.wait_on_exit_predicates.append(lambda code: code != 0)
if sys.version_info < (3,):
# Popen() expects command line and environment to be bytes, not Unicode.
# Assume that values are filenames - it's usually either that, or numbers -
# but don't allow encoding to fail if we guessed wrong.
encode = functools.partial(compat.filename_bytes, errors="replace")
cmdline = [encode(s) for s in cmdline]
env = {encode(k): encode(v) for k, v in env.items()}
debuggee.spawn(process_name, cmdline, cwd, env, redirect_output)
return {}
def terminate_request(self, request):
request.respond({})
debuggee.kill()
def disconnect(self):
debuggee.kill()
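
For illustration, here is how property_or_debug_option() treats a few hypothetical "launch" argument combinations:

    {"waitOnNormalExit": True}                                        # accepted: new property
    {"debugOptions": ["WaitOnNormalExit"]}                            # accepted: legacy flag
    {"waitOnNormalExit": True, "debugOptions": ["WaitOnNormalExit"]}  # accepted: both agree
    {"waitOnNormalExit": False, "debugOptions": ["WaitOnNormalExit"]} # rejected via isnt_valid()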


@ -0,0 +1,156 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, division, print_function, unicode_literals
import atexit
import locale
import os
import struct
import subprocess
import sys
import threading
from ptvsd.common import fmt, log, messaging
process = None
"""subprocess.Popen instance for the debuggee process."""
wait_on_exit_predicates = []
"""List of functions that determine whether to pause after debuggee process exits.
Every function is invoked with exit code as the argument. If any of the functions
returns True, the launcher pauses and waits for user input before exiting.
"""
def describe():
return fmt("debuggee process with PID={0}", process.pid)
def spawn(process_name, cmdline, cwd, env, redirect_output):
from ptvsd.launcher import adapter, output
log.info(
"Spawning debuggee process:\n\n"
"Current directory: {0!j}\n\n"
"Command line: {1!j}\n\n"
"Environment variables: {2!j}\n\n",
cwd,
cmdline,
env,
)
close_fds = set()
try:
if redirect_output:
# subprocess.PIPE behavior can vary substantially depending on Python version
# and platform; using our own pipes keeps it simple, predictable, and fast.
stdout_r, stdout_w = os.pipe()
stderr_r, stderr_w = os.pipe()
close_fds |= {stdout_r, stdout_w, stderr_r, stderr_w}
kwargs = dict(stdout=stdout_w, stderr=stderr_w)
else:
kwargs = {}
try:
global process
process = subprocess.Popen(cmdline, cwd=cwd, env=env, bufsize=0, **kwargs)
except Exception as exc:
raise messaging.Message.cant_handle(
"Couldn't spawn debuggee: {0}\n\nCommand line:{1!r}", exc, cmdline
)
log.info("Spawned {0}.", describe())
atexit.register(kill)
adapter.channel.send_event(
"process",
{
"startMethod": "launch",
"isLocalProcess": True,
"systemProcessId": process.pid,
"name": process_name,
"pointerSize": struct.calcsize("P") * 8,
},
)
if redirect_output:
encoding = env.get("PYTHONIOENCODING", locale.getpreferredencoding())
for category, fd, tee in [
("stdout", stdout_r, sys.stdout),
("stderr", stderr_r, sys.stderr),
]:
output.CaptureOutput(category, fd, tee.fileno(), encoding)
close_fds.remove(fd)
wait_thread = threading.Thread(target=wait_for_exit, name="wait_for_exit()")
wait_thread.daemon = True
wait_thread.start()
finally:
for fd in close_fds:
try:
os.close(fd)
except Exception:
log.exception()
def kill():
if process is None:
return
try:
if process.poll() is None:
log.info("Killing {0}", describe())
process.kill()
except Exception:
log.exception("Failed to kill {0}", describe())
def wait_for_exit():
from ptvsd.launcher import adapter, output
try:
code = process.wait()
except Exception:
log.exception("Couldn't determine process exit code:")
code = -1
log.info("{0} exited with code {1}", describe(), code)
output.wait_for_remaining_output()
try:
adapter.channel.send_event("exited", {"exitCode": code})
except Exception:
pass
if any(pred(code) for pred in wait_on_exit_predicates):
_wait_for_user_input()
try:
adapter.channel.send_event("terminated")
except Exception:
pass
def _wait_for_user_input():
if sys.stdout and sys.stdin:
from ptvsd.common import log
can_getch = sys.stdin.isatty()
if can_getch:
try:
import msvcrt
except ImportError:
can_getch = False
if can_getch:
log.debug("msvcrt available - waiting for user input via getch()")
sys.stdout.write("Press any key to continue . . . ")
sys.stdout.flush()
msvcrt.getch()
else:
log.debug("msvcrt not available - waiting for user input via read()")
sys.stdout.write("Press Enter to continue . . . ")
sys.stdout.flush()
sys.stdin.read(1)
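
As a usage note, wait_on_exit_predicates is populated by the "launch" request handler and consulted once the exit code is known; condensed from the handler and wait_for_exit() above:

    # "waitOnNormalExit" / "waitOnAbnormalExit" become exit-code predicates:
    wait_on_exit_predicates.append(lambda code: code == 0)
    wait_on_exit_predicates.append(lambda code: code != 0)
    # After the debuggee exits:
    if any(pred(code) for pred in wait_on_exit_predicates):
        _wait_for_user_input()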


@ -0,0 +1,102 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, division, print_function, unicode_literals
import codecs
import os
import threading
from ptvsd.common import log
from ptvsd.launcher import adapter, debuggee
class CaptureOutput(object):
"""Captures output from the specified file descriptor, and tees it into another
file descriptor while generating DAP "output" events for it.
"""
instances = {}
"""Keys are output categories, values are CaptureOutput instances."""
def __init__(self, category, fd, tee_fd, encoding):
assert category not in self.instances
self.instances[category] = self
log.info("Capturing {0} of {1}.", category, debuggee.describe())
self.category = category
self._fd = fd
self._tee_fd = tee_fd
try:
self._decoder = codecs.getincrementaldecoder(encoding)(errors="replace")
except LookupError:
self._decoder = None
log.warning(
'Unable to generate "output" events for {0} - unknown encoding {1!r}',
category,
encoding,
)
self._worker_thread = threading.Thread(target=self._worker, name=category)
self._worker_thread.start()
def __del__(self):
fd = self._fd
if fd is not None:
try:
os.close(fd)
except Exception:
pass
def _send_output_event(self, s, final=False):
if self._decoder is None:
return
s = self._decoder.decode(s, final=final)
if len(s) == 0:
return
try:
adapter.channel.send_event(
"output", {"category": self.category, "output": s}
)
except Exception:
pass # channel to adapter is already closed
def _worker(self):
while self._fd is not None:
try:
s = os.read(self._fd, 0x1000)
except Exception:
break
size = len(s)
if size == 0:
break
# Tee the output first, before sending the "output" event.
i = 0
while i < size:
written = os.write(self._tee_fd, s[i:])
i += written
if not written:
# This means that the output stream was closed from the other end.
# Do the same to the debuggee, so that it knows as well.
os.close(self._fd)
self._fd = None
break
self._send_output_event(s)
# Flush any remaining data in the incremental decoder.
self._send_output_event(b"", final=True)
def wait_for_remaining_output():
"""Waits for all remaining output to be captured and propagated.
"""
for category, instance in CaptureOutput.instances.items():
log.info("Waiting for remaining {0} of {1}.", category, debuggee.describe())
instance._worker_thread.join()
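
A brief aside on why _send_output_event() flushes the decoder with final=True: a single read() can end in the middle of a multi-byte sequence, and the incremental decoder buffers those bytes until the rest arrives (example assumes UTF-8):

    import codecs
    dec = codecs.getincrementaldecoder("utf-8")(errors="replace")
    dec.decode(b"\xe2\x82")       # "" - incomplete sequence is buffered, not emitted
    dec.decode(b"\xac")           # "\u20ac" (euro sign) - completed by the next chunk
    dec.decode(b"", final=True)   # flushes anything still buffered at end of stream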


@ -1,92 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
if True:
# On Win32, colorama is not active when pytest-timeout dumps captured output
# on timeout, and ANSI sequences aren't properly interpreted.
# TODO: re-enable on Windows after enabling proper ANSI sequence handling:
# https://docs.microsoft.com/en-us/windows/console/console-virtual-terminal-sequences
#
# Azure Pipelines doesn't support ANSI sequences at all.
# TODO: re-enable on all platforms after adding Azure Pipelines detection.
RESET = ''
BLACK = ''
BLUE = ''
CYAN = ''
GREEN = ''
RED = ''
WHITE = ''
LIGHT_BLACK = ''
LIGHT_BLUE = ''
LIGHT_CYAN = ''
LIGHT_GREEN = ''
LIGHT_MAGENTA = ''
LIGHT_RED = ''
LIGHT_WHITE = ''
LIGHT_YELLOW = ''
def colorize_json(s):
return s
def color_repr(obj):
return repr(obj)
else:
from colorama import Fore
from pygments import highlight, lexers, formatters, token
# Colors that are commented out don't work with PowerShell.
RESET = Fore.RESET
BLACK = Fore.BLACK
BLUE = Fore.BLUE
CYAN = Fore.CYAN
GREEN = Fore.GREEN
# MAGENTA = Fore.MAGENTA
RED = Fore.RED
WHITE = Fore.WHITE
# YELLOW = Fore.YELLOW
LIGHT_BLACK = Fore.LIGHTBLACK_EX
LIGHT_BLUE = Fore.LIGHTBLUE_EX
LIGHT_CYAN = Fore.LIGHTCYAN_EX
LIGHT_GREEN = Fore.LIGHTGREEN_EX
LIGHT_MAGENTA = Fore.LIGHTMAGENTA_EX
LIGHT_RED = Fore.LIGHTRED_EX
LIGHT_WHITE = Fore.LIGHTWHITE_EX
LIGHT_YELLOW = Fore.LIGHTYELLOW_EX
color_scheme = {
token.Token: ('white', 'white'),
token.Punctuation: ('', ''),
token.Operator: ('', ''),
token.Literal: ('brown', 'brown'),
token.Keyword: ('brown', 'brown'),
token.Name: ('white', 'white'),
token.Name.Constant: ('brown', 'brown'),
token.Name.Attribute: ('brown', 'brown'),
# token.Name.Tag: ('white', 'white'),
# token.Name.Function: ('white', 'white'),
# token.Name.Variable: ('white', 'white'),
}
formatter = formatters.TerminalFormatter(colorscheme=color_scheme)
json_lexer = lexers.JsonLexer()
python_lexer = lexers.PythonLexer()
def colorize_json(s):
return highlight(s, json_lexer, formatter).rstrip()
def color_repr(obj):
return highlight(repr(obj), python_lexer, formatter).rstrip()

tests/debug/__init__.py Normal file
View file

@ -0,0 +1,21 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, division, print_function, unicode_literals
import py
import ptvsd
PTVSD_DIR = py.path.local(ptvsd.__file__) / ".."
PTVSD_ADAPTER_DIR = PTVSD_DIR / "adapter"
# Added to the environment variables of all adapters and servers.
PTVSD_ENV = {"PYTHONUNBUFFERED": "1"}
# Expose Session directly.
def Session(*args, **kwargs):
from tests.debug import session
return session.Session(*args, **kwargs)

tests/debug/comms.py Normal file
View file

@ -0,0 +1,123 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, division, print_function, unicode_literals
"""Various means of communication with the debuggee."""
import threading
import socket
from ptvsd.common import fmt, log, messaging
from tests.timeline import Request, Response
class BackChannel(object):
TIMEOUT = 20
def __init__(self, session):
self.session = session
self.port = None
self._established = threading.Event()
self._socket = None
self._server_socket = None
def __str__(self):
return fmt("backchannel-{0}", self.session.id)
def listen(self):
self._server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._server_socket.settimeout(self.TIMEOUT)
self._server_socket.bind(("127.0.0.1", 0))
_, self.port = self._server_socket.getsockname()
self._server_socket.listen(0)
def accept_worker():
log.info(
"Listening for incoming connection from {0} on port {1}...",
self,
self.port,
)
try:
self._socket, _ = self._server_socket.accept()
except socket.timeout:
raise log.exception("Timed out waiting for {0} to connect", self)
self._socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
log.info("Incoming connection from {0} accepted.", self)
self._setup_stream()
accept_thread = threading.Thread(
target=accept_worker, name=fmt("{0} listener", self)
)
accept_thread.daemon = True
accept_thread.start()
def _setup_stream(self):
self._stream = messaging.JsonIOStream.from_socket(self._socket, name=str(self))
self._established.set()
def receive(self):
self._established.wait()
return self._stream.read_json()
def send(self, value):
self.session.timeline.unfreeze()
self._established.wait()
t = self.session.timeline.mark(("sending", value))
self._stream.write_json(value)
return t
def expect(self, expected):
actual = self.receive()
assert expected == actual, fmt(
"Test expected {0!r} on backchannel, but got {1!r} from the debuggee",
expected,
actual,
)
def close(self):
if self._socket:
log.debug("Closing {0} socket of {1}...", self, self.session)
try:
self._socket.shutdown(socket.SHUT_RDWR)
except Exception:
pass
self._socket = None
if self._server_socket:
log.debug("Closing {0} server socket of {1}...", self, self.session)
try:
self._server_socket.shutdown(socket.SHUT_RDWR)
except Exception:
pass
self._server_socket = None
class ScratchPad(object):
def __init__(self, session):
self.session = session
def __getitem__(self, key):
raise NotImplementedError
def __setitem__(self, key, value):
"""Sets debug_me.scratchpad[key] = value inside the debugged process.
"""
stackTrace_responses = self.session.all_occurrences_of(
Response(Request("stackTrace"))
)
assert (
stackTrace_responses
), 'scratchpad requires at least one "stackTrace" request in the timeline.'
stack_trace = stackTrace_responses[-1].body
frame_id = stack_trace["stackFrames"][0]["id"]
log.info("{0} debug_me.scratchpad[{1!r}] = {2!r}", self.session, key, value)
expr = fmt("__import__('debug_me').scratchpad[{0!r}] = {1!r}", key, value)
self.session.request(
"evaluate", {"frameId": frame_id, "context": "repl", "expression": expr}
)
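
For context, this is roughly how BackChannel and ScratchPad are exercised by tests elsewhere in this commit; it is a sketch that relies on the suite's pytest fixtures (start_method, run_as, code_to_debug), not a standalone script:

with debug.Session(start_method, backchannel=True) as session:
    backchannel = session.backchannel
    session.configure(run_as, code_to_debug)
    session.start_debugging()
    backchannel.send("continue")          # unblock the debuggee
    backchannel.expect("done")            # assert on what the debuggee sent back
    session.scratchpad["exit"] = True     # evaluated inside the debugged process
    session.wait_for_next_event("terminated")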

tests/debug/output.py Normal file
View file

@ -0,0 +1,109 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, division, print_function, unicode_literals
import threading
from ptvsd.common import fmt, log
class CaptureOutput(object):
"""Captures stdout and stderr of the debugged process.
"""
def __init__(self, session):
self.session = session
self._lock = threading.Lock()
self._chunks = {}
self._worker_threads = []
def __str__(self):
return fmt("CaptureOutput({0})", self.session)
def _worker(self, pipe, name):
chunks = self._chunks[name]
while True:
try:
chunk = pipe.read(0x1000)
except Exception:
break
if not len(chunk):
break
log.info("{0} {1}> {2!r}", self.session, name, chunk)
with self._lock:
chunks.append(chunk)
def _capture(self, pipe, name):
assert name not in self._chunks
self._chunks[name] = []
thread = threading.Thread(
target=lambda: self._worker(pipe, name), name=fmt("{0} {1}", self, name)
)
thread.daemon = True
thread.start()
self._worker_threads.append(thread)
def capture(self, process):
"""Start capturing stdout and stderr of the process.
"""
assert not self._worker_threads
log.info("Capturing {0} stdout and stderr", self.session)
self._capture(process.stdout, "stdout")
self._capture(process.stderr, "stderr")
def wait(self, timeout=None):
"""Wait for all remaining output to be captured.
"""
if not self._worker_threads:
return
log.debug("Waiting for remaining {0} stdout and stderr...", self.session)
for t in self._worker_threads:
t.join(timeout)
self._worker_threads[:] = []
def _output(self, which, encoding, lines):
try:
result = self._chunks[which]
except KeyError:
raise AssertionError(
fmt("{0} was not captured for {1}", which, self.session)
)
with self._lock:
result = b"".join(result)
if encoding is not None:
result = result.decode(encoding)
return result.splitlines() if lines else result
def stdout(self, encoding=None):
"""Returns stdout captured from the debugged process, as a single string.
If encoding is None, returns bytes. Otherwise, returns unicode.
"""
return self._output("stdout", encoding, lines=False)
def stderr(self, encoding=None):
"""Returns stderr captured from the debugged process, as a single string.
If encoding is None, returns bytes. Otherwise, returns unicode.
"""
return self._output("stderr", encoding, lines=False)
def stdout_lines(self, encoding=None):
"""Returns stdout captured from the debugged process, as a list of lines.
If encoding is None, each line is bytes. Otherwise, each line is unicode.
"""
return self._output("stdout", encoding, lines=True)
def stderr_lines(self, encoding=None):
"""Returns stderr captured from the debugged process, as a list of lines.
If encoding is None, each line is bytes. Otherwise, each line is unicode.
"""
return self._output("stderr", encoding, lines=True)
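
The accessor methods above reduce to joining the captured chunks and optionally decoding and splitting them; a tiny standalone equivalent, for illustration:

def joined_output(chunks, encoding=None, lines=False):
    # chunks are the raw bytes read off the pipe by the worker thread
    result = b"".join(chunks)
    if encoding is not None:
        result = result.decode(encoding)
    return result.splitlines() if lines else result

assert joined_output([b"a\n", b"b"]) == b"a\nb"
assert joined_output([b"a\n", b"b"], "utf-8", lines=True) == ["a", "b"]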

View file

@ -2,58 +2,27 @@
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, division, print_function, unicode_literals
import collections
import itertools
import os
import psutil
import py.path
import subprocess
import sys
import tests
import ptvsd
from ptvsd.common import compat, fmt, json, log, messaging
from ptvsd.common.compat import unicode
from tests import code, watchdog, helpers, timeline
import tests
from tests import code, debug, timeline, watchdog
from tests.debug import comms, output
from tests.patterns import some
StopInfo = collections.namedtuple(
"StopInfo", ["body", "frames", "thread_id", "frame_id"]
)
PTVSD_DIR = py.path.local(ptvsd.__file__) / ".."
PTVSD_ADAPTER_DIR = PTVSD_DIR / "adapter"
# Added to the environment variables of every new debug.Session
PTVSD_ENV = {"PYTHONUNBUFFERED": "1"}
def kill_process_tree(process):
log.info("Killing {0} process tree...", process.pid)
procs = [process]
try:
procs += process.children(recursive=True)
except Exception:
pass
for p in procs:
log.warning(
"Killing {0}process (pid={1})",
"" if p.pid == process.pid else "child ",
p.pid,
)
try:
p.kill()
except psutil.NoSuchProcess:
pass
except Exception:
log.exception()
log.info("Killed {0} process tree", process.pid)
class Session(object):
counter = itertools.count(1)
@ -83,7 +52,9 @@ class Session(object):
self.ignore_unobserved.extend(self.start_method.ignore_unobserved)
self.adapter_process = None
self.backchannel = helpers.BackChannel(self) if backchannel else None
self.channel = None
self.backchannel = comms.BackChannel(self) if backchannel else None
self.scratchpad = comms.ScratchPad(self)
# Expose some common members of timeline directly - these should be the ones
# that are the most straightforward to use, and are difficult to use incorrectly.
@ -115,16 +86,31 @@ class Session(object):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is not None:
if exc_type is None:
# Only wait for debuggee if there was no exception in the test - if there
# was one, the debuggee might still be waiting for further requests.
self.start_method.wait_for_debuggee()
else:
# Log the error, in case another one happens during shutdown.
log.exception(exc_info=(exc_type, exc_val, exc_tb))
try:
self.wait_for_exit()
except Exception:
raise log.exception()
self.disconnect()
self._stop_adapter()
# If there was an exception, don't complain about unobserved occurrences -
# they are expected if the test didn't complete.
if exc_type is not None:
self.timeline.observe_all()
self.timeline.close()
if self.adapter_process is not None:
log.info(
"Waiting for {0} with PID={1} to exit.",
self.adapter_id,
self.adapter_process.pid,
)
self.adapter_process.wait()
watchdog.unregister_spawn(self.adapter_process.pid, self.adapter_id)
self.adapter_process = None
if self.backchannel:
self.backchannel.close()
@ -200,16 +186,24 @@ class Session(object):
except Exception:
pass
def _start_adapter(self):
args = [sys.executable, PTVSD_ADAPTER_DIR]
def _process_disconnect(self):
self.timeline.mark("disconnect", block=False)
def _start_adapter(self):
args = [sys.executable, debug.PTVSD_ADAPTER_DIR]
if self.log_dir is not None:
args += ["--log-dir", self.log_dir]
args = [compat.filename_str(s) for s in args]
env = os.environ.copy()
env.update(debug.PTVSD_ENV)
env = {
compat.filename_str(k): compat.filename_str(v) for k, v in env.items()
}
log.info("Spawning {0}: {1!j}", self.adapter_id, args)
args = [compat.filename_str(s) for s in args]
self.adapter_process = psutil.Popen(
args, bufsize=0, stdin=subprocess.PIPE, stdout=subprocess.PIPE
args, bufsize=0, stdin=subprocess.PIPE, stdout=subprocess.PIPE, env=env
)
log.info("Spawned {0} with PID={1}", self.adapter_id, self.adapter_process.pid)
watchdog.register_spawn(self.adapter_process.pid, self.adapter_id)
@ -218,28 +212,13 @@ class Session(object):
self.adapter_process, name=str(self)
)
handlers = messaging.MessageHandlers(
request=self._process_request, event=self._process_event
request=self._process_request,
event=self._process_event,
disconnect=self._process_disconnect,
)
self.channel = messaging.JsonMessageChannel(stream, handlers)
self.channel.start()
def _stop_adapter(self):
if self.adapter_process is None:
return
self.channel.close()
self.timeline.finalize()
self.timeline.close()
log.info(
"Waiting for {0} with PID={1} to exit.",
self.adapter_id,
self.adapter_process.pid,
)
self.adapter_process.wait()
watchdog.unregister_spawn(self.adapter_process.pid, self.adapter_id)
self.adapter_process = None
def _handshake(self):
telemetry = self.wait_for_next_event("output")
assert telemetry == {
@ -267,13 +246,13 @@ class Session(object):
def configure(self, run_as, target, env=None, **kwargs):
env = {} if env is None else dict(env)
env.update(PTVSD_ENV)
env.update(debug.PTVSD_ENV)
pythonpath = env.get("PYTHONPATH", "")
if pythonpath:
pythonpath += os.pathsep
pythonpath += (tests.root / "DEBUGGEE_PYTHONPATH").strpath
pythonpath += os.pathsep + (PTVSD_DIR / "..").strpath
pythonpath += os.pathsep + (debug.PTVSD_DIR / "..").strpath
env["PYTHONPATH"] = pythonpath
env["PTVSD_SESSION_ID"] = str(self.id)
@ -285,17 +264,24 @@ class Session(object):
if self.log_dir is not None:
kwargs["logToFile"] = True
self.captured_output = output.CaptureOutput(self)
self.start_method.configure(run_as, target, env=env, **kwargs)
def start_debugging(self):
self.start_method.start_debugging()
start_request = self.start_method.start_debugging()
process = self.wait_for_next_event("process", freeze=False)
assert process == some.dict.containing(
{
"startMethod": start_request.command,
"name": some.str,
"isLocalProcess": True,
"systemProcessId": some.int,
}
)
def request_continue(self):
self.request("continue", freeze=False)
def request_disconnect(self):
self.request("disconnect")
def set_breakpoints(self, path, lines):
"""Sets breakpoints in the specified file, and returns the list of all the
corresponding DAP Breakpoint objects in the same order.
@ -412,7 +398,7 @@ class Session(object):
"exception",
"breakpoint",
"entry",
"goto"
"goto",
]:
expected_stopped["preserveFocusHint"] = True
assert stopped == some.dict.containing(expected_stopped)
@ -444,11 +430,24 @@ class Session(object):
return "".join(event("output", unicode) for event in events)
def captured_stdout(self, encoding=None):
return self.start_method.captured_output.stdout(encoding)
return self.captured_output.stdout(encoding)
def captured_stderr(self, encoding=None):
return self.start_method.captured_output.stderr(encoding)
return self.captured_output.stderr(encoding)
def wait_for_exit(self):
self.start_method.wait_for_debuggee()
self.request_disconnect()
def wait_for_disconnect(self):
self.timeline.wait_for_next(timeline.Mark("disconnect"))
def disconnect(self):
if self.channel is None:
return
try:
self.request("disconnect")
finally:
try:
self.channel.close()
except Exception:
pass
self.channel.wait()
self.channel = None

View file

@ -2,7 +2,7 @@
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
from __future__ import absolute_import, division, print_function, unicode_literals
import os
@ -16,7 +16,7 @@ import time
from ptvsd.common import compat, fmt, json, log
from ptvsd.common.compat import unicode
from tests import helpers, net, watchdog
from tests import net, timeline, watchdog
from tests.patterns import some
@ -27,7 +27,7 @@ PTVSD_PORT = net.get_test_server_port(5678, 5800)
# and start_method is attach_socket_*
PTVSD_DEBUG_ME = """
import ptvsd
ptvsd.enable_attach(("127.0.0.1", {ptvsd_port}), log_dir={log_dir})
ptvsd.enable_attach(("127.0.0.1", {ptvsd_port}), log_dir={log_dir!r})
ptvsd.wait_for_attach()
"""
@ -38,7 +38,6 @@ class DebugStartBase(object):
def __init__(self, session, method="base"):
self.session = session
self.method = method
self.captured_output = helpers.CapturedOutput(self.session)
self.debuggee_process = None
self.expected_exit_code = None
@ -47,11 +46,19 @@ class DebugStartBase(object):
def wait_for_debuggee(self):
# TODO: Exit should not be restricted to launch tests only
if self.expected_exit_code is not None and 'launch' in self.method:
exited = self.session.wait_for_next_event("exited", freeze=False)
assert exited == some.dict.containing({"exitCode": self.expected_exit_code})
if "launch" in self.method:
exited = self.session.timeline.wait_until_realized(
timeline.Event("exited")
).body
assert exited == some.dict.containing(
{
"exitCode": some.int
if self.expected_exit_code is None
else self.expected_exit_code
}
)
self.session.wait_for_next_event("terminated")
self.session.timeline.wait_until_realized(timeline.Event("terminated"))
if self.debuggee_process is None:
return
@ -85,6 +92,7 @@ class DebugStartBase(object):
steppingResumesAllThreads=None,
rules=None,
successExitCodes=None,
pathMappings=None,
):
if logToFile:
args["logToFile"] = logToFile
@ -141,6 +149,9 @@ class DebugStartBase(object):
if successExitCodes:
args["successExitCodes"] = successExitCodes
if pathMappings is not None:
args["pathMappings"] = pathMappings
def __str__(self):
return self.method
@ -242,16 +253,6 @@ class Launch(DebugStartBase):
self._build_common_args(launch_args, **kwargs)
return launch_args
def _wait_for_process_event(self):
process_body = self.session.wait_for_next_event("process", freeze=False)
assert process_body == {
"name": some.str,
"isLocalProcess": True,
"startMethod": "launch",
"systemProcessId": some.int,
}
return process_body
def configure(self, run_as, target, **kwargs):
self._launch_args = self._build_launch_args({}, run_as, target, **kwargs)
self.no_debug = self._launch_args.get("noDebug", False)
@ -271,11 +272,11 @@ class Launch(DebugStartBase):
self.session.request("configurationDone")
self._launch_request.wait_for_response(freeze=False)
self._wait_for_process_event()
return self._launch_request
def run_in_terminal(self, request):
args = request("args", json.array(unicode))
cwd = request("cwd", unicode)
cwd = request("cwd", ".")
env = os.environ.copy()
env.update(request("env", json.object(unicode)))
@ -301,7 +302,7 @@ class Launch(DebugStartBase):
stderr=subprocess.PIPE,
)
watchdog.register_spawn(self.debuggee_process.pid, self.session.debuggee_id)
self.captured_output.capture(self.debuggee_process)
self.session.captured_output.capture(self.debuggee_process)
return {}
@ -319,7 +320,6 @@ class AttachBase(DebugStartBase):
target,
host="127.0.0.1",
port=PTVSD_PORT,
pathMappings=None,
**kwargs
):
assert host is not None
@ -337,9 +337,6 @@ class AttachBase(DebugStartBase):
attach_args["host"] = host
attach_args["port"] = port
if pathMappings is not None:
attach_args["pathMappings"] = pathMappings
self._build_common_args(attach_args, **kwargs)
return attach_args
@ -372,6 +369,7 @@ class AttachBase(DebugStartBase):
cli_args += kwargs.get("args")
cli_args = [compat.filename_str(s) for s in cli_args]
env = {compat.filename_str(k): compat.filename_str(v) for k, v in env.items()}
cwd = kwargs.get("cwd")
if cwd:
@ -392,7 +390,8 @@ class AttachBase(DebugStartBase):
cwd,
env_str,
)
self.debuggee_process = subprocess.Popen(
self.debuggee_process = psutil.Popen(
cli_args,
cwd=cwd,
env=env,
@ -402,19 +401,22 @@ class AttachBase(DebugStartBase):
stderr=subprocess.PIPE,
)
watchdog.register_spawn(self.debuggee_process.pid, self.session.debuggee_id)
self.captured_output.capture(self.debuggee_process)
self.session.captured_output.capture(self.debuggee_process)
pid = self.debuggee_process.pid
if self.method == "attach_pid":
self._attach_args["processId"] = pid
else:
connected = False
while connected is False:
time.sleep(0.1)
else:
log.info(
"Waiting for {0} to open listener socket...", self.session.debuggee_id
)
for i in range(0, 100):
connections = psutil.net_connections()
connected = (
any(p for (_, _, _, _, _, _, p) in connections if p == pid)
)
if any(p == pid for (_, _, _, _, _, _, p) in connections):
break
time.sleep(0.1)
else:
log.warning("Couldn't detect open listener socket; proceeding anyway.")
self._attach_request = self.session.send_request("attach", self._attach_args)
self.session.wait_for_next_event("initialized")
@ -426,17 +428,8 @@ class AttachBase(DebugStartBase):
if self.no_debug:
log.info('{0} ignoring "noDebug" in "attach"', self.session)
process_body = self.session.wait_for_next_event("process")
assert process_body == some.dict.containing(
{
"name": some.str,
"isLocalProcess": True,
"startMethod": "attach",
"systemProcessId": some.int,
}
)
self._attach_request.wait_for_response()
return self._attach_request
class AttachSocketImport(AttachBase):
@ -475,10 +468,11 @@ class AttachSocketImport(AttachBase):
self._attach_args = self._build_attach_args({}, run_as, target, **kwargs)
ptvsd_port = self._attach_args["port"]
log_dir = None
if self._attach_args.get("logToFile", False):
log_dir = '"' + self.session.log_dir + '"'
log_dir = (
self.session.log_dir
if not self._attach_args.get("logToFile", False)
else None
)
env["PTVSD_DEBUG_ME"] = fmt(
PTVSD_DEBUG_ME, ptvsd_port=ptvsd_port, log_dir=log_dir
)
@ -536,7 +530,6 @@ class AttachProcessId(AttachBase):
def __init__(self, session):
super(AttachProcessId, self).__init__(session, "attach_pid")
def configure(
self,
run_as,
@ -561,6 +554,7 @@ class AttachProcessId(AttachBase):
run_as, target, cwd=cwd, env=env, args=args, cli_args=cli_args, **kwargs
)
class CustomServer(DebugStartBase):
def __init__(self, session):
super().__init__(session, "custom_server")

View file

@ -1,248 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
import threading
import time
import socket
import sys
from ptvsd.common import fmt, log, messaging
from tests.timeline import Request, Response
if sys.version_info >= (3, 5):
clock = time.monotonic
else:
clock = time.clock
timestamp_zero = clock()
def timestamp():
return clock() - timestamp_zero
class CapturedOutput(object):
"""Captured stdout and stderr of the debugged process.
"""
def __init__(self, session):
self.session = session
self._lock = threading.Lock()
self._lines = {}
self._worker_threads = []
def __str__(self):
return fmt("CapturedOutput({0})", self.session)
def _worker(self, pipe, name):
lines = self._lines[name]
while True:
try:
line = pipe.readline()
except Exception:
line = None
if line:
log.info("{0} {1}> {2!r}", self.session, name, line)
with self._lock:
lines.append(line)
else:
break
def _capture(self, pipe, name):
assert name not in self._lines
self._lines[name] = []
thread = threading.Thread(
target=lambda: self._worker(pipe, name), name=fmt("{0} {1}", self, name)
)
thread.daemon = True
thread.start()
self._worker_threads.append(thread)
def capture(self, process):
"""Start capturing stdout and stderr of the process.
"""
assert not self._worker_threads
log.info("Capturing {0} stdout and stderr", self.session)
self._capture(process.stdout, "stdout")
self._capture(process.stderr, "stderr")
def wait(self, timeout=None):
"""Wait for all remaining output to be captured.
"""
if not self._worker_threads:
return
log.debug("Waiting for remaining {0} stdout and stderr...", self.session)
for t in self._worker_threads:
t.join(timeout)
self._worker_threads[:] = []
def _output(self, which, encoding, lines):
assert self.session.timeline.is_frozen
try:
result = self._lines[which]
except KeyError:
raise AssertionError(
fmt("{0} was not captured for {1}", which, self.session)
)
# The list might still be appended to concurrently, so take a snapshot of it.
with self._lock:
result = list(result)
if encoding is not None:
result = [s.decode(encoding) for s in result]
if not lines:
sep = b"" if encoding is None else ""
result = sep.join(result)
return result
def stdout(self, encoding=None):
"""Returns stdout captured from the debugged process, as a single string.
If encoding is None, returns bytes. Otherwise, returns unicode.
"""
return self._output("stdout", encoding, lines=False)
def stderr(self, encoding=None):
"""Returns stderr captured from the debugged process, as a single string.
If encoding is None, returns bytes. Otherwise, returns unicode.
"""
return self._output("stderr", encoding, lines=False)
def stdout_lines(self, encoding=None):
"""Returns stdout captured from the debugged process, as a list of lines.
If encoding is None, each line is bytes. Otherwise, each line is unicode.
"""
return self._output("stdout", encoding, lines=True)
def stderr_lines(self, encoding=None):
"""Returns stderr captured from the debugged process, as a list of lines.
If encoding is None, each line is bytes. Otherwise, each line is unicode.
"""
return self._output("stderr", encoding, lines=True)
class BackChannel(object):
TIMEOUT = 20
def __init__(self, session):
self.session = session
self.port = None
self._established = threading.Event()
self._socket = None
self._server_socket = None
def __str__(self):
return fmt("backchannel-{0}", self.session.id)
def listen(self):
self._server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._server_socket.settimeout(self.TIMEOUT)
self._server_socket.bind(('127.0.0.1', 0))
_, self.port = self._server_socket.getsockname()
self._server_socket.listen(0)
def accept_worker():
log.info('Listening for incoming connection from {0} on port {1}...', self, self.port)
try:
self._socket, _ = self._server_socket.accept()
except socket.timeout:
raise log.exception("Timed out waiting for {0} to connect", self)
self._socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
log.info('Incoming connection from {0} accepted.', self)
self._setup_stream()
accept_thread = threading.Thread(
target=accept_worker,
name=fmt('{0} listener', self)
)
accept_thread.daemon = True
accept_thread.start()
def _setup_stream(self):
self._stream = messaging.JsonIOStream.from_socket(self._socket, name=str(self))
self._established.set()
def receive(self):
self._established.wait()
return self._stream.read_json()
def send(self, value):
self.session.timeline.unfreeze()
self._established.wait()
t = self.session.timeline.mark(('sending', value))
self._stream.write_json(value)
return t
def expect(self, expected):
actual = self.receive()
assert expected == actual, fmt(
"Test expected {0!r} on backchannel, but got {1!r} from the debuggee",
expected,
actual,
)
def close(self):
if self._socket:
log.debug('Closing {0} socket of {1}...', self, self.session)
try:
self._socket.shutdown(socket.SHUT_RDWR)
except Exception:
pass
self._socket = None
if self._server_socket:
log.debug('Closing {0} server socket of {1}...', self, self.session)
try:
self._server_socket.shutdown(socket.SHUT_RDWR)
except Exception:
pass
self._server_socket = None
class ScratchPad(object):
def __init__(self, session):
self.session = session
def __getitem__(self, key):
raise NotImplementedError
def __setitem__(self, key, value):
"""Sets debug_me.scratchpad[key] = value inside the debugged process.
"""
stackTrace_responses = self.session.all_occurrences_of(
Response(Request("stackTrace"))
)
assert stackTrace_responses, (
'scratchpad requires at least one "stackTrace" request in the timeline.'
)
stack_trace = stackTrace_responses[-1].body
frame_id = stack_trace["stackFrames"][0]["id"]
log.info("{0} debug_me.scratchpad[{1!r}] = {2!r}", self.session, key, value)
expr = fmt(
"__import__('debug_me').scratchpad[{0!r}] = {1!r}",
key,
value,
)
self.session.request(
"evaluate",
{
"frameId": frame_id,
"context": "repl",
"expression": expr,
},
)

View file

@ -1,42 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
import threading
from ptvsd.common.compat import queue
from tests import colors
from tests.helpers import timestamp
real_print = print
print_queue = queue.Queue()
def print(*args, **kwargs):
"""Like builtin print(), but synchronized across multiple threads,
and adds a timestamp.
"""
timestamped = kwargs.pop('timestamped', True)
t = timestamp() if timestamped else None
print_queue.put((t, args, kwargs))
def wait_for_output():
print_queue.join()
def print_worker():
while True:
t, args, kwargs = print_queue.get()
if t is not None:
t = colors.LIGHT_BLACK + ('@%09.6f:' % t) + colors.RESET
args = (t,) + args
real_print(*args, **kwargs)
print_queue.task_done()
print_thread = threading.Thread(target=print_worker, name='printer')
print_thread.daemon = True
print_thread.start()

View file

@ -1,164 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from __future__ import absolute_import, print_function, unicode_literals
import errno
import os.path
import platform
import pytest
import socket
import subprocess
import sys
from ptvsd.common import launcher
launcher_py = os.path.abspath(launcher.__file__)
class ReceivePid(object):
def start_server(self):
self.listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.listener.bind(("127.0.0.1", 0))
self.listener.listen(1)
self.host, self.port = self.listener.getsockname()
return (self.host, self.port)
def wait_for_pid(self):
try:
sock, _ = self.listener.accept()
finally:
self.listener.close()
try:
data = sock.makefile().read()
finally:
sock.close()
return -1 if data == b"" else int(data)
@pytest.mark.parametrize("run_as", ["program", "module", "code"])
@pytest.mark.parametrize("mode", ["normal", "abnormal", "normal+abnormal", ""])
@pytest.mark.parametrize("seperator", ["seperator", ""])
@pytest.mark.parametrize("port", ["12345", ""])
def test_launcher_parser(mode, seperator, run_as, port):
args = []
switch = mode.split("+")
if "normal" in switch:
args += [launcher.WAIT_ON_NORMAL_SWITCH]
if "abnormal" in switch:
args += [launcher.WAIT_ON_ABNORMAL_SWITCH]
if port:
args += [launcher.INTERNAL_PORT_SWITCH, port]
if seperator:
args += ["--"]
if run_as == "file":
expected = ["myscript.py", "--arg1", "--arg2", "--arg3", "--", "more args"]
elif run_as == "module":
expected = ["-m", "myscript", "--arg1", "--arg2", "--arg3", "--", "more args"]
else:
expected = ["-c", "some code"]
args += expected
if seperator:
actual = list(launcher.parse(args))
assert actual == expected
else:
with pytest.raises(AssertionError):
actual = launcher.parse(args)
@pytest.mark.parametrize("run_as", ["program", "module", "code"])
@pytest.mark.parametrize("mode", ["normal", "abnormal", "normal+abnormal", ""])
@pytest.mark.parametrize("exit_code", [0, 10])
@pytest.mark.timeout(5)
@pytest.mark.skipif(platform.system() == "Windows", reason="Not reliable on windows.")
def test_launcher(pyfile, mode, exit_code, run_as):
@pyfile
def code_to_run():
import sys
sys.exit(int(sys.argv[1]))
args = [sys.executable, launcher_py]
switch = mode.split("+")
pid_server = ReceivePid()
_, port = pid_server.start_server()
if "normal" in switch:
args += [launcher.WAIT_ON_NORMAL_SWITCH]
if "abnormal" in switch:
args += [launcher.WAIT_ON_ABNORMAL_SWITCH]
args += [launcher.INTERNAL_PORT_SWITCH, str(port)]
args += ["--"]
if run_as == "file":
args += [code_to_run.strpath, str(exit_code)]
elif run_as == "module":
args += ["-m", "code_to_run", str(exit_code)]
else:
with open(code_to_run.strpath, "r") as f:
args += ["-c", f.read(), str(exit_code)]
wait_for_user = (exit_code, mode) in [
(0, "normal"),
(10, "abnormal"),
(0, "normal+abnormal"),
(10, "normal+abnormal"),
]
if platform.system() == "Windows":
p = subprocess.Popen(
args=args,
cwd=os.path.dirname(code_to_run.strpath),
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
# CREATE_NEW_CONSOLE is needed, otherwise you cannot write to stdin.
creationflags=subprocess.CREATE_NEW_CONSOLE,
)
# NOTE: We disabled this test on Windows because there is no
# reliable way to write to stdin without going through the Win32
# WriteConsoleInput.
else:
p = subprocess.Popen(
args=args,
cwd=os.path.dirname(code_to_run.strpath),
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
)
assert pid_server.wait_for_pid() >= -1
if wait_for_user:
outstr = b""
while not outstr.endswith(b". . . "):
outstr += p.stdout.read(1)
exc_type = BrokenPipeError if sys.version_info >= (3,) else IOError
while p.poll() is None:
try:
p.stdin.write(b"\n")
p.stdin.flush()
except exc_type as exc:
# This can occur if the process exits before write completes.
if isinstance(exc, IOError) and exc.errno != errno.EPIPE:
raise
else:
p.wait()
assert exit_code == p.returncode

View file

@ -6,7 +6,8 @@ from __future__ import absolute_import, print_function, unicode_literals
import pytest
from tests import debug, test_data, start_methods
from tests import debug, test_data
from tests.debug import start_methods
from tests.patterns import some
from tests.timeline import Event
@ -84,7 +85,7 @@ def test_reattach(pyfile, start_method, run_as):
session.wait_for_stop(expected_frames=[
some.dap.frame(code_to_debug, "first"),
])
session.request_disconnect()
session.request("disconnect")
session.wait_for_disconnect()
with session.reattach(target=(run_as, code_to_debug)) as session2:
@ -93,7 +94,7 @@ def test_reattach(pyfile, start_method, run_as):
some.dap.frame(code_to_debug, "second"),
])
session.scratchpad["exit"] = True
session.request_disconnect()
session.request("disconnect")
session.wait_for_disconnect()

View file

@ -11,7 +11,8 @@ import re
import sys
from ptvsd.common import fmt
from tests import debug, test_data, start_methods
from tests import debug, test_data
from tests.debug import start_methods
from tests.patterns import some

View file

@ -7,7 +7,8 @@ from __future__ import absolute_import, print_function, unicode_literals
import os.path
import pytest
from tests import debug, start_methods
from tests import debug
from tests.debug import start_methods
from tests.patterns import some

View file

@ -6,7 +6,8 @@ from __future__ import absolute_import, print_function, unicode_literals
import pytest
from ptvsd.common import compat
from tests import code, debug, log, net, test_data, start_methods
from tests import code, debug, log, net, test_data
from tests.debug import start_methods
from tests.patterns import some
pytestmark = pytest.mark.timeout(60)

View file

@ -93,7 +93,7 @@ def test_vsc_exception_options_raise_without_except(
filters += ["raised"] if raised == "raisedOn" else []
filters += ["uncaught"] if uncaught == "uncaughtOn" else []
with debug.Session(start_method) as session:
session.ignore_unobserved += [Event("stopped")]
session.ignore_unobserved.append(Event("stopped"))
session.expected_exit_code = some.int
session.configure(run_as, code_to_debug)
session.send_request(
@ -274,7 +274,7 @@ def test_raise_exception_options(pyfile, start_method, run_as, exceptions, break
pass
with debug.Session(start_method) as session:
session.ignore_unobserved += [Event("stopped")]
session.ignore_unobserved.append(Event("stopped"))
session.expected_exit_code = some.int
session.configure(run_as, code_to_debug)
path = [{"names": ["Python Exceptions"]}]

View file

@ -9,7 +9,8 @@ import pytest
import sys
from ptvsd.common import compat
from tests import code, debug, log, net, test_data, start_methods
from tests import code, debug, log, net, test_data
from tests.debug import start_methods
from tests.patterns import some
pytestmark = pytest.mark.timeout(60)

View file

@ -8,7 +8,8 @@ import contextlib
import pytest
from ptvsd.common import compat
from tests import debug, start_methods
from tests import debug
from tests.debug import start_methods
@contextlib.contextmanager

View file

@ -9,11 +9,15 @@ import pytest
import sys
from ptvsd.common import messaging
from tests import debug, start_methods
from tests import debug
from tests.debug import start_methods
from tests.patterns import some
from tests.timeline import Event, Request
pytestmark = pytest.mark.skip("https://github.com/microsoft/ptvsd/issues/1706")
@pytest.mark.timeout(30)
@pytest.mark.skipif(
platform.system() != "Windows",
@ -245,7 +249,7 @@ def test_autokill(pyfile, start_method, run_as):
)
parent_session.start_debugging()
with parent_session.attach_to_next_subprocess() as child_session:
child_session.start_debugging()

View file

@ -10,7 +10,8 @@ import re
import ptvsd
from ptvsd.common import messaging
from tests import debug, test_data, start_methods
from tests import debug, test_data
from tests.debug import start_methods
from tests.patterns import some
from tests.timeline import Event
@ -24,8 +25,8 @@ def test_run(pyfile, start_method, run_as):
import sys
print("begin")
backchannel.wait_for("continue")
backchannel.send(path.abspath(sys.modules["ptvsd"].__file__))
backchannel.wait_for("continue")
print("end")
with debug.Session(start_method, backchannel=True) as session:
@ -33,24 +34,15 @@ def test_run(pyfile, start_method, run_as):
session.configure(run_as, code_to_debug)
session.start_debugging()
session.timeline.freeze()
process_event, = session.all_occurrences_of(Event("process"))
expected_name = (
"-c"
if run_as == "code"
else some.str.matching(re.escape(code_to_debug.strpath) + r"(c|o)?")
)
assert process_event == Event(
"process", some.dict.containing({"name": expected_name})
)
backchannel.send("continue")
expected_ptvsd_path = path.abspath(ptvsd.__file__)
backchannel.expect(
some.str.matching(re.escape(expected_ptvsd_path) + r"(c|o)?")
)
backchannel.send("continue")
session.wait_for_next_event("terminated")
session.proceed()
def test_run_submodule():
with debug.Session("launch") as session:

View file

@ -10,112 +10,134 @@ from tests import debug
from tests.patterns import some
def test_with_path_mappings(pyfile, tmpdir, run_as, start_method):
def test_with_path_mappings(pyfile, tmpdir, start_method, run_as):
@pyfile
def code_to_debug():
import debug_me # noqa
import debug_me # noqa
def full_function():
# Note that this function is not called, it's there just to make the mapping explicit.
print('cell1 line 2') # @map_to_cell1_line_2
print('cell1 line 3') # @map_to_cell1_line_3
print("cell1 line 2") # @map_to_cell1_line_2
print("cell1 line 3") # @map_to_cell1_line_3
print('cell2 line 2') # @map_to_cell2_line_2
print('cell2 line 3') # @map_to_cell2_line_3
print("cell2 line 2") # @map_to_cell2_line_2
print("cell2 line 3") # @map_to_cell2_line_3
def strip_lines(s):
return '\n'.join([line.strip() for line in s.splitlines()])
return "\n".join([line.strip() for line in s.splitlines()])
def create_code():
cell1_code = compile(strip_lines(''' # line 1
cell1_code = compile(
strip_lines(
""" # line 1
a = 1 # line 2
b = 2 # line 3
'''), '<cell1>', 'exec')
"""
),
"<cell1>",
"exec",
)
cell2_code = compile(strip_lines('''# line 1
cell2_code = compile(
strip_lines(
"""# line 1
c = 3 # line 2
d = 4 # line 3
'''), '<cell2>', 'exec')
"""
),
"<cell2>",
"exec",
)
return {'cell1': cell1_code, 'cell2': cell2_code}
return {"cell1": cell1_code, "cell2": cell2_code}
code = create_code()
exec(code['cell1'], {})
exec(code['cell1'], {})
exec(code["cell1"], {})
exec(code["cell1"], {})
exec(code['cell2'], {})
exec(code['cell2'], {})
print('ok')
exec(code["cell2"], {})
exec(code["cell2"], {})
print("ok")
with debug.Session(start_method) as session:
session.configure(run_as, code_to_debug)
map_to_cell_1_line2 = code_to_debug.lines['map_to_cell1_line_2']
map_to_cell_2_line2 = code_to_debug.lines['map_to_cell2_line_2']
map_to_cell_1_line2 = code_to_debug.lines["map_to_cell1_line_2"]
map_to_cell_2_line2 = code_to_debug.lines["map_to_cell2_line_2"]
source_entry = code_to_debug
if sys.platform == 'win32':
source_entry = code_to_debug.strpath
if sys.platform == "win32":
# Check if it matches even not normalized.
source_entry = code_to_debug[0].lower() + code_to_debug[1:].upper()
source_entry = source_entry.replace('\\', '/')
source_entry = source_entry[0].lower() + source_entry[1:].upper()
source_entry = source_entry.replace("\\", "/")
# Set breakpoints first and the map afterwards to make sure that it's reapplied.
session.set_breakpoints(code_to_debug, [map_to_cell_1_line2])
session.send_request('setPydevdSourceMap', arguments={
'source': {'path': source_entry},
'pydevdSourceMaps': [
{
'line': map_to_cell_1_line2,
'endLine': map_to_cell_1_line2 + 1,
'runtimeSource': {'path': '<cell1>'},
'runtimeLine': 2,
},
{
'line': map_to_cell_2_line2,
'endLine': map_to_cell_2_line2 + 1,
'runtimeSource': {'path': '<cell2>'},
'runtimeLine': 2,
},
],
}).wait_for_response()
session.request(
"setPydevdSourceMap",
{
"source": {"path": source_entry},
"pydevdSourceMaps": [
{
"line": map_to_cell_1_line2,
"endLine": map_to_cell_1_line2 + 1,
"runtimeSource": {"path": "<cell1>"},
"runtimeLine": 2,
},
{
"line": map_to_cell_2_line2,
"endLine": map_to_cell_2_line2 + 1,
"runtimeSource": {"path": "<cell2>"},
"runtimeLine": 2,
},
],
},
)
session.start_debugging()
hit = session.wait_for_thread_stopped('breakpoint')
frames = hit.stacktrace.body['stackFrames']
assert frames[0]['source']['path'] == some.path(code_to_debug)
session.wait_for_stop(
"breakpoint",
expected_frames=[some.dap.frame(code_to_debug, line=map_to_cell_1_line2)],
)
session.set_breakpoints(code_to_debug, [map_to_cell_2_line2])
# Leave only the cell2 mapping.
session.send_request('setPydevdSourceMap', arguments={
'source': {'path': source_entry},
'pydevdSourceMaps': [
{
'line': map_to_cell_2_line2,
'endLine': map_to_cell_2_line2 + 1,
'runtimeSource': {'path': '<cell2>'},
'runtimeLine': 2,
},
],
}).wait_for_response()
session.request(
"setPydevdSourceMap",
{
"source": {"path": source_entry},
"pydevdSourceMaps": [
{
"line": map_to_cell_2_line2,
"endLine": map_to_cell_2_line2 + 1,
"runtimeSource": {"path": "<cell2>"},
"runtimeLine": 2,
}
],
},
)
session.send_request('continue').wait_for_response()
hit = session.wait_for_thread_stopped('breakpoint')
session.request("continue")
session.wait_for_stop(
"breakpoint",
expected_frames=[some.dap.frame(code_to_debug, line=map_to_cell_2_line2)],
)
# Remove the cell2 mapping so that it doesn't stop again.
session.send_request('setPydevdSourceMap', arguments={
'source': {'path': source_entry},
'pydevdSourceMaps': [
{
'line': map_to_cell_1_line2,
'endLine': map_to_cell_1_line2 + 1,
'runtimeSource': {'path': '<cell1>'},
'runtimeLine': 2,
},
],
}).wait_for_response()
session.request(
"setPydevdSourceMap",
{
"source": {"path": source_entry},
"pydevdSourceMaps": [
{
"line": map_to_cell_1_line2,
"endLine": map_to_cell_1_line2 + 1,
"runtimeSource": {"path": "<cell1>"},
"runtimeLine": 2,
}
],
},
)
session.request_continue()

View file

@ -7,16 +7,36 @@ from __future__ import absolute_import, print_function, unicode_literals
import platform
import pytest
import sys
import time
from tests import debug, start_methods
from ptvsd.common import log
from tests import debug
from tests.debug import start_methods
from tests.patterns import some
def has_waited(session):
lines = session.captured_output.stdout_lines()
result = any(
s == some.bytes.matching(br"Press .* to continue . . .\s*") for s in lines
)
# log.info("!!! {1} {0!r}", lines, result)
return result
def wait_and_press_key(session):
log.info("Waiting for keypress prompt...")
while not has_waited(session):
time.sleep(0.1)
log.info("Simulating keypress.")
session.process.stdin.write(b" \r\n")
@pytest.mark.parametrize("start_method", [start_methods.Launch])
@pytest.mark.skipif(
sys.version_info < (3, 0) and platform.system() == "Windows",
reason="On Windows + Python 2, unable to send key strokes to test.",
)
def test_wait_on_normal_exit_enabled(pyfile, start_method, run_as):
def test_wait_on_normal_exit_enabled(pyfile, run_as):
@pyfile
def code_to_debug():
from debug_me import ptvsd
@ -24,74 +44,57 @@ def test_wait_on_normal_exit_enabled(pyfile, start_method, run_as):
ptvsd.break_into_debugger()
print() # line on which it'll actually break
with debug.Session(start_method) as session:
session.configure(
run_as, code_to_debug,
waitOnNormalExit=True,
)
with debug.Session(start_methods.Launch) as session:
session.configure(run_as, code_to_debug, waitOnNormalExit=True)
session.start_debugging()
session.wait_for_stop()
session.request_continue()
session.process.stdin.write(b" \r\n")
assert any(s.startswith("Press") for s in session.stdout_lines("utf-8"))
wait_and_press_key(session)
@pytest.mark.parametrize("start_method", [start_methods.Launch])
@pytest.mark.skipif(
sys.version_info < (3, 0) and platform.system() == "Windows",
reason="On Windows + Python 2, unable to send key strokes to test.",
)
def test_wait_on_abnormal_exit_enabled(pyfile, start_method, run_as):
def test_wait_on_abnormal_exit_enabled(pyfile, run_as):
@pyfile
def code_to_debug():
from debug_me import backchannel, ptvsd
from debug_me import ptvsd
import sys
ptvsd.break_into_debugger()
backchannel.send("done")
sys.exit(12345)
print() # line on which it'll actually break
sys.exit(42)
with debug.Session(start_method, backchannel=True) as session:
backchannel = session.backchannel
session.expected_exit_code = 12345
session.configure(
run_as, code_to_debug,
waitOnAbnormalExit=True,
)
with debug.Session(start_methods.Launch) as session:
session.expected_exit_code = 42
session.configure(run_as, code_to_debug, waitOnAbnormalExit=True)
session.start_debugging()
session.wait_for_stop()
session.request_continue()
assert backchannel.receive() == "done"
session.process.stdin.write(b" \r\n")
assert any(s.startswith("Press") for s in session.stdout_lines("utf-8"))
wait_and_press_key(session)
@pytest.mark.parametrize("start_method", [start_methods.Launch])
def test_exit_normally_with_wait_on_abnormal_exit_enabled(pyfile, start_method, run_as):
@pyfile
def code_to_debug():
from debug_me import backchannel, ptvsd
from debug_me import ptvsd
ptvsd.break_into_debugger()
backchannel.send("done")
print()
with debug.Session(start_method, backchannel=True) as session:
backchannel = session.backchannel
session.configure(
run_as, code_to_debug,
waitOnAbnormalExit=True,
)
with debug.Session(start_method) as session:
session.configure(run_as, code_to_debug, waitOnAbnormalExit=True)
session.start_debugging()
session.wait_for_stop()
session.request_continue()
session.wait_for_termination()
assert backchannel.receive() == "done"
session.wait_for_next_event("exited")
assert not has_waited(session)
session.proceed()

View file

@ -6,7 +6,8 @@ from __future__ import absolute_import, print_function, unicode_literals
import pytest
from tests import debug, start_methods
from tests import debug
from tests.debug import start_methods
from tests.patterns import some

View file

@ -5,7 +5,6 @@
from __future__ import absolute_import, print_function, unicode_literals
from tests import debug
from tests.printer import print
from tests.patterns import some
@ -52,7 +51,6 @@ def test_tracing(pyfile, start_method, run_as):
print(0) # @outer2
func(True)
print(code_to_debug.lines)
with debug.Session(start_method, client_id='vscode') as session:
session.configure(run_as, code_to_debug)
session.set_breakpoints(code_to_debug, all)

View file

@ -77,14 +77,21 @@ def test_module_events(pyfile, start_method, run_as):
session.start_debugging()
session.wait_for_stop()
modules = session.all_occurrences_of(Event("module"))
modules = [
(m.body["module"]["name"], m.body["module"]["path"]) for m in modules
]
assert modules[:3] == [
("module2", some.path(module2)),
("module1", some.path(module1)),
("__main__", some.path(test_code)),
]
# Stack trace after the stop will trigger module events, but they are only
# sent after the trace response, so we need to wait for them separately.
# The order isn't guaranteed, either, so just wait for any 3 modules.
session.timeline.wait_until_realized(
Event("module") >> Event("module") >> Event("module")
)
modules = {
event.body["module"]["name"]: event.body["module"]["path"]
for event in session.all_occurrences_of(Event("module"))
}
assert modules == {
"__main__": some.path(test_code),
"module1": some.path(module1),
"module2": some.path(module2),
}
session.request_continue()

View file

@ -14,7 +14,8 @@ import threading
import types
from ptvsd.common import compat, timestamp
from tests import code, pydevd_log, start_methods
from tests import code, pydevd_log
from tests.debug import start_methods
__all__ = ['run_as', 'start_method', 'with_pydevd_log', 'daemon', 'pyfile']

View file

@ -154,9 +154,14 @@ class Timeline(object):
for occ in occurrences:
occ.observed = True
def observe_all(self, expectation):
def observe_all(self, expectation=None):
self.expect_frozen()
self.observe(*[occ for occ in self if occ == expectation])
occs = (
list(self)
if expectation is None
else [occ for occ in self if occ == expectation]
)
self.observe(*occs)
def wait_until(self, condition, freeze=None):
freeze = freeze or self.is_frozen