Test fixes.

Pavel Minaev 2019-07-03 00:38:47 -07:00 committed by Pavel Minaev
parent 0f76b660ed
commit 42bbc0946d
31 changed files with 547 additions and 271 deletions

View file

@ -8,6 +8,7 @@ from __future__ import absolute_import, print_function, unicode_literals
"""
import inspect
import itertools
import sys
from ptvsd.common import fmt
@ -30,6 +31,11 @@ try:
except AttributeError:
xrange = builtins.range
try:
izip = itertools.izip
except AttributeError:
izip = builtins.zip
try:
reload = builtins.reload
except AttributeError:
@ -41,54 +47,101 @@ except ImportError:
import Queue as queue # noqa
def force_unicode(s, encoding, errors="strict"):
"""Converts s to Unicode, using the provided encoding. If s is already Unicode,
it is returned as is.
"""
return s.decode(encoding, errors) if isinstance(s, bytes) else s
return s.decode(encoding, errors) if isinstance(s, bytes) else unicode(s)
def maybe_utf8(s, errors="strict"):
"""Converts s to Unicode, assuming it is UTF-8. If s is already Unicode, it is
returned as is
def force_bytes(s, encoding, errors="strict"):
"""Converts s to bytes, using the provided encoding. If s is already bytes,
it is returned as is.
If errors="strict" and s is bytes, its encoding is verified by decoding it;
UnicodeError is raised if it cannot be decoded.
"""
return force_unicode(s, "utf-8", errors)
if isinstance(s, unicode):
return s.encode(encoding, errors)
else:
s = bytes(s)
if errors == "strict":
# Return value ignored - invoked solely for verification.
s.decode(encoding, errors)
return s
def force_str(s, encoding, errors="strict"):
"""Converts s to str (which is bytes on Python 2, and unicode on Python 3), using
the provided encoding if necessary. If s is already str, it is returned as is.
If errors="strict", str is bytes, and s is str, its encoding is verified by decoding
it; UnicodeError is raised if it cannot be decoded.
"""
return (force_bytes if str is bytes else force_unicode)(s, encoding, errors)
def force_ascii(s, errors="strict"):
"""Same as force_bytes(s, "ascii", errors)
"""
return force_bytes(s, "ascii", errors)
def force_utf8(s, errors="strict"):
"""Same as force_bytes(s, "utf8", errors)
"""
return force_bytes(s, "utf8", errors)
def filename(s, errors="strict"):
"""Ensures that filename is Unicode.
"""Same as force_unicode(s, sys.getfilesystemencoding(), errors)
"""
return force_unicode(s, sys.getfilesystemencoding(), errors)
def filename_bytes(s, errors="strict"):
"""Same as force_bytes(s, sys.getfilesystemencoding(), errors)
"""
return force_bytes(s, sys.getfilesystemencoding(), errors)
def nameof(obj, quote=False):
"""Returns the most descriptive name of a Python module, class, or function,
as a Unicode string.
as a Unicode string.
If quote=True, name is quoted with repr().
Best-effort, but guaranteed to not fail - always returns something.
"""
try:
name = obj.__qualname__
except AttributeError:
except Exception:
try:
name = obj.__name__
except AttributeError:
except Exception:
# Fall back to raw repr(), and skip quoting.
try:
return maybe_utf8(repr(obj), "replace")
name = repr(obj)
except Exception:
return "<unknown>"
else:
quote = False
if quote:
name = repr(name)
return maybe_utf8(name, "replace")
try:
name = repr(name)
except Exception:
pass
return force_unicode(name, "utf-8", "replace")
def srcnameof(obj):
"""Returns the most descriptive name of a Python module, class, or function,
including source information (filename and linenumber), if available.
Best-effort, but guaranteed to not fail - always returns something.
"""
name = nameof(obj, quote=True)
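For illustration, a minimal sketch (not part of this commit) of what the force_* helpers are expected to do, assuming Python 3 where str is the unicode type; the paths and literals are made up:

from ptvsd.common import compat

assert compat.force_unicode(b"caf\xc3\xa9", "utf-8") == u"caf\u00e9"   # bytes -> unicode
assert compat.force_utf8(u"caf\u00e9") == b"caf\xc3\xa9"               # unicode -> bytes

# With errors other than "strict", force_bytes() skips the decode-back verification:
compat.force_bytes(b"\xff", "utf-8", errors="replace")                 # returned as-is

path = compat.filename(b"/tmp/example")   # hypothetical path
assert isinstance(path, str)              # decoded with sys.getfilesystemencoding()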

View file

@ -24,10 +24,10 @@ class JsonObject(object):
representation via str() or format().
"""
json_encoder_type = json.JSONEncoder
json_encoder_factory = json.JSONEncoder
"""Used by __format__ when format_spec is not empty."""
json_encoder = json_encoder_type(indent=4)
json_encoder = json_encoder_factory(indent=4)
"""The default encoder used by __format__ when format_spec is empty."""
def __init__(self, value):
@ -42,8 +42,8 @@ class JsonObject(object):
def __format__(self, format_spec):
"""If format_spec is empty, uses self.json_encoder to serialize self.value
as a string. Otherwise, format_spec is treated as an argument list to be
passed to self.json_encoder_type - which defaults to JSONEncoder - and then
the resulting formatter is used to serialize self.value as a string.
passed to self.json_encoder_factory - which defaults to JSONEncoder - and
then the resulting formatter is used to serialize self.value as a string.
Example::
@ -54,11 +54,13 @@ class JsonObject(object):
# "indent=4,sort_keys=True". What we want is to build a function call
# from that which looks like:
#
# json_encoder_type(indent=4,sort_keys=True)
# json_encoder_factory(indent=4,sort_keys=True)
#
# which we can then eval() to create our encoder instance.
make_encoder = "json_encoder_type(" + format_spec + ")"
encoder = eval(make_encoder, {"json_encoder_type": self.json_encoder_type})
make_encoder = "json_encoder_factory(" + format_spec + ")"
encoder = eval(make_encoder, {
"json_encoder_factory": self.json_encoder_factory
})
else:
encoder = self.json_encoder
return encoder.encode(self.value)
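A rough usage sketch, assuming JsonObject simply wraps the given value: an empty format spec uses the prebuilt json_encoder, while a non-empty spec is evaluated as keyword arguments to json_encoder_factory:

from ptvsd.common import fmt

obj = fmt.JsonObject({"b": 2, "a": 1})
print("{0}".format(obj))                              # default encoder, indent=4
print("{0:indent=None,sort_keys=True}".format(obj))   # json_encoder_factory(indent=None, sort_keys=True)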

View file

@ -31,6 +31,17 @@ class JsonIOStream(object):
MAX_BODY_SIZE = 0xFFFFFF
json_decoder_factory = json.JSONDecoder
"""Used by read_json() when decoder is None."""
json_encoder_factory = json.JSONEncoder
"""Used by write_json() when encoder is None."""
# @staticmethod
# def json_encoder_factory(*args, **kwargs):
# """Used by write_json() when encoder is None."""
# return json.JSONEncoder(*args, sort_keys=True, **kwargs)
@classmethod
def from_stdio(cls, name="stdio"):
"""Creates a new instance that receives messages from sys.stdin, and sends
@ -110,7 +121,7 @@ class JsonIOStream(object):
there are no more values to be read.
"""
decoder = decoder if decoder is not None else json.JSONDecoder()
decoder = decoder if decoder is not None else self.json_decoder_factory()
# If any error occurs while reading and parsing the message, log the original
# raw message data as is, so that it's possible to diagnose missing or invalid
@ -203,7 +214,7 @@ class JsonIOStream(object):
Value is written as encoded by encoder.encode().
"""
encoder = encoder if encoder is not None else json.JSONEncoder(sort_keys=True)
encoder = encoder if encoder is not None else self.json_encoder_factory()
# Format the value as a message, and try to log any failures using as much
# information as we already have at the point of the failure. For example,
@ -266,6 +277,9 @@ class MessageDict(collections.OrderedDict):
guarantee for outgoing messages.
"""
def __repr__(self):
return dict.__repr__(self)
def _invalid_if_no_key(func):
def wrap(self, key, *args, **kwargs):
try:
@ -1145,7 +1159,7 @@ class JsonMessageChannel(object):
del d.associate_with
message_dicts = []
decoder = json.JSONDecoder(object_hook=object_hook)
decoder = self.stream.json_decoder_factory(object_hook=object_hook)
message = self.stream.read_json(decoder)
assert isinstance(message, MessageDict) # make sure stream used decoder
@ -1155,7 +1169,7 @@ class JsonMessageChannel(object):
raise
except Exception:
raise log.exception(
"Fatal error while processing message for {0}:\n\n{1!r}",
"Fatal error while processing message for {0}:\n\n{1!j}",
self.name,
message,
)
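With this change, read_json()/write_json() fall back to the class-level factories instead of hard-coded json.JSONDecoder/JSONEncoder, so a test suite can swap in its own encoder globally. A hedged sketch (SortedEncoder is a made-up name, not part of the commit):

import json
from ptvsd.common import messaging

class SortedEncoder(json.JSONEncoder):
    def __init__(self, *args, **kwargs):
        kwargs.setdefault("sort_keys", True)
        super(SortedEncoder, self).__init__(*args, **kwargs)

# Every stream that is not given an explicit encoder now uses SortedEncoder.
messaging.JsonIOStream.json_encoder_factory = SortedEncoder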

View file

@ -7,10 +7,13 @@ from __future__ import absolute_import, print_function, unicode_literals
"""ptvsd tests
"""
import json
import pkgutil
import pytest
import py.path
# Do not import anything from ptvsd until assert rewriting is enabled below!
_tests_dir = py.path.local(__file__) / ".."
@ -29,6 +32,7 @@ Idiomatic use is via from .. import::
# tests will hang indefinitely if they time out.
__import__("pytest_timeout")
# We want pytest to rewrite asserts (for better error messages) in the common
# code used by the tests, and in all the test helpers. This does not affect ptvsd
# inside debugged processes.
@ -44,8 +48,25 @@ for _, submodule, _ in tests_submodules:
submodule = str("{0}.{1}".format(__name__, submodule))
_register_assert_rewrite(submodule)
# Now we can import these, and pytest will rewrite asserts in them.
from ptvsd.common import fmt, log, messaging
# Enable full logging to stderr, and make timestamps shorter to match maximum test
# run time better.
from ptvsd.common import log
log.stderr_levels = set(log.LEVELS)
log.timestamp_format = "06.3f"
# Enable JSON serialization for py.path.local
class JSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, py.path.local):
return obj.strpath
return super(JSONEncoder, self).default(obj)
fmt.JsonObject.json_encoder = JSONEncoder(indent=4)
fmt.JsonObject.json_encoder_factory = JSONEncoder
messaging.JsonIOStream.json_encoder_factory = JSONEncoder
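A small usage sketch of the encoder defined above (the path is hypothetical): with it installed as the factory, py.path.local values serialize to their strpath both in logged messages and in anything written through JsonIOStream.

import py.path

encoder = JSONEncoder(indent=4)
print(encoder.encode({"cwd": py.path.local("/tmp/work")}))
# prints a JSON object whose "cwd" value is the plain string "/tmp/work"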

View file

@ -7,6 +7,7 @@ from __future__ import absolute_import, print_function, unicode_literals
"""Helpers to work with Python code.
"""
import py.path
import re
@ -14,20 +15,23 @@ def get_marked_line_numbers(path):
"""Given a path to a Python source file, extracts line numbers for all lines
that are marked with #@. For example, given this file::
print(1) #@foo
print(1) # @foo
print(2)
print(3) #@bar
print(3) # @bar
the function will return::
{'foo': 1, 'bar': 3}
{"foo": 1, "bar": 3}
"""
if isinstance(path, py.path.local):
path = path.strpath
with open(path) as f:
lines = {}
for i, line in enumerate(f):
match = re.search(r'#\s*@\s*(.*?)\s*$', line)
match = re.search(r"#\s*@\s*(.+?)\s*$", line)
if match:
marker = match.group(1)
lines[marker] = i + 1
return lines
return lines
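A short sketch of how the helper is used, following the docstring's own example (example.py is a hypothetical file):

from tests import code

# Suppose example.py contains:
#     print(1)  # @foo
#     print(2)
#     print(3)  # @bar
lines = code.get_marked_line_numbers("example.py")
assert lines == {"foo": 1, "bar": 3}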

View file

@ -18,7 +18,7 @@ import threading
import time
import ptvsd
from ptvsd.common import fmt, log, messaging
from ptvsd.common import compat, fmt, log, messaging
from tests import net, test_data
from tests.patterns import some
from tests.timeline import Timeline, Event, Response
@ -60,6 +60,7 @@ class Session(object):
self.id = next(self._counter)
log.info('Starting debug session {0} via {1!r}', self.id, start_method)
self.lock = threading.RLock()
self.target = ('code', 'print("OK")')
self.start_method = start_method
self.start_method_args = {}
@ -78,7 +79,7 @@ class Session(object):
self.env = os.environ.copy()
self.env.update(PTVSD_ENV)
self.env['PYTHONPATH'] = str(test_data / "_PYTHONPATH")
self.env['PYTHONPATH'] = (test_data / "_PYTHONPATH").strpath
self.env['PTVSD_SESSION_ID'] = str(self.id)
self.is_running = False
@ -90,10 +91,11 @@ class Session(object):
self.socket = None
self.server_socket = None
self.connected = threading.Event()
self._output_capture_threads = []
self.output_data = {'stdout': [], 'stderr': []}
self.backchannel = None
self._output_lines = {'stdout': [], 'stderr': []}
self._output_worker_threads = []
self.timeline = Timeline(ignore_unobserved=[
Event('output'),
Event('thread', some.dict.containing({'reason': 'exited'}))
@ -150,21 +152,30 @@ class Session(object):
self.timeline.ignore_unobserved = value
def close(self):
if self.socket:
try:
self.socket.shutdown(socket.SHUT_RDWR)
except Exception:
pass
self.socket = None
log.debug('Closed socket to {0}', self)
with self.lock:
if self.socket:
try:
self.socket.shutdown(socket.SHUT_RDWR)
except Exception:
pass
try:
self.socket.close()
except Exception:
pass
self.socket = None
log.debug('Closed socket to {0}', self)
if self.server_socket:
try:
self.server_socket.shutdown(socket.SHUT_RDWR)
except Exception:
pass
self.server_socket = None
log.debug('Closed server socket for {0}', self)
if self.server_socket:
try:
self.server_socket.shutdown(socket.SHUT_RDWR)
except Exception:
pass
try:
self.server_socket.close()
except Exception:
pass
self.server_socket = None
log.debug('Closed server socket for {0}', self)
if self.backchannel:
self.backchannel.close()
@ -174,22 +185,22 @@ class Session(object):
if self.kill_ptvsd:
try:
self._kill_process_tree()
except:
except Exception:
log.exception('Error killing {0} (pid={1}) process tree', self, self.pid)
log.info('Killed {0} (pid={1}) process tree', self, self.pid)
# Clean up pipes to avoid leaking OS handles.
try:
self.process.stdin.close()
except:
except Exception:
pass
try:
self.process.stdout.close()
except:
except Exception:
pass
try:
self.process.stderr.close()
except:
except Exception:
pass
self._wait_for_remaining_output()
@ -200,21 +211,21 @@ class Session(object):
def _get_argv_for_launch(self):
argv = [sys.executable]
argv += [str(PTVSD_DIR)]
argv += [PTVSD_DIR.strpath]
argv += ['--client']
argv += ['--host', 'localhost', '--port', str(self.ptvsd_port)]
return argv
def _get_argv_for_attach_using_cmdline(self):
argv = [sys.executable]
argv += [str(PTVSD_DIR)]
argv += [PTVSD_DIR.strpath]
argv += ['--wait']
argv += ['--host', 'localhost', '--port', str(self.ptvsd_port)]
return argv
def _get_argv_for_attach_using_pid(self):
argv = [sys.executable]
argv += [str(PTVSD_DIR)]
argv += [PTVSD_DIR.strpath]
argv += ['--client', '--host', 'localhost', '--port', str(self.ptvsd_port)]
# argv += ['--pid', '<pid>'] # pid value to be appended later
return argv
@ -236,6 +247,8 @@ class Session(object):
def _get_target(self):
argv = []
run_as, path_or_code = self.target
if isinstance(path_or_code, py.path.local):
path_or_code = path_or_code.strpath
if run_as == 'file':
self._validate_pyfile(path_or_code)
argv += [path_or_code]
@ -245,7 +258,7 @@ class Session(object):
if os.path.isfile(path_or_code) or os.path.isdir(path_or_code):
self.env['PYTHONPATH'] += os.pathsep + os.path.dirname(path_or_code)
try:
module = path_or_code[len(os.path.dirname(path_or_code)) + 1:-3]
module = path_or_code[(len(os.path.dirname(path_or_code)) + 1) : -3]
except Exception:
module = 'code_to_debug'
argv += ['-m', module]
@ -318,7 +331,7 @@ class Session(object):
elif self.start_method == 'attach_socket_import':
dbg_argv += self._get_argv_for_attach_using_import()
# TODO: Remove adding to python path after enabling Tox
self.env['PYTHONPATH'] = str(PTVSD_DIR / "..") + os.pathsep + self.env['PYTHONPATH']
self.env['PYTHONPATH'] = (PTVSD_DIR / "..").strpath + os.pathsep + self.env['PYTHONPATH']
self.env['PTVSD_DEBUG_ME'] = fmt(PTVSD_DEBUG_ME, ptvsd_port=self.ptvsd_port)
elif self.start_method == 'attach_pid':
self._listen()
@ -354,13 +367,22 @@ class Session(object):
self.backchannel.listen()
self.env['PTVSD_BACKCHANNEL_PORT'] = str(self.backchannel.port)
# Force env to use str everywhere - this is needed for Python 2.7 on Windows.
env = {str(k): str(v) for k, v in self.env.items()}
# Force env to use str everywhere - this is needed for Python 2.7.
# Assume that values are filenames - it's usually either that, or numbers.
env = {
compat.force_str(k, "ascii"): compat.filename(v)
for k, v in self.env.items()
}
env_str = "\n".join((
fmt("{0}={1}", env_name, env[env_name])
for env_name in sorted(self.env.keys())
for env_name in sorted(env.keys())
))
cwd = self.cwd
if isinstance(cwd, py.path.local):
cwd = cwd.strpath
log.info(
'{0} will have:\n\n'
'ptvsd: {1}\n'
@ -380,6 +402,9 @@ class Session(object):
)
spawn_args = usr_argv if self.start_method == 'attach_pid' else dbg_argv
# Force args to use str everywhere - this is needed for Python 2.7.
spawn_args = [compat.filename(s) for s in spawn_args]
log.info('Spawning {0}: {1!j}', self, spawn_args)
self.process = subprocess.Popen(
spawn_args,
@ -387,7 +412,8 @@ class Session(object):
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=self.cwd)
cwd=cwd,
)
self.pid = self.process.pid
self.psutil_process = psutil.Process(self.pid)
self.is_running = True
@ -435,7 +461,7 @@ class Session(object):
if close:
self.timeline.close()
def wait_for_termination(self):
def wait_for_termination(self, close=False):
log.info('Waiting for {0} to terminate', self)
# BUG: ptvsd sometimes exits without sending 'terminate' or 'exited', likely due to
@ -457,7 +483,8 @@ class Session(object):
if Event('terminated') in self:
self.expect_realized(Event('exited') >> Event('terminated', {}))
self.timeline.close()
if close:
self.timeline.close()
def wait_for_exit(self):
"""Waits for the spawned ptvsd process to exit. If it doesn't exit within
@ -470,10 +497,12 @@ class Session(object):
assert self.psutil_process is not None
killed = []
def kill():
time.sleep(self.WAIT_FOR_EXIT_TIMEOUT)
if self.is_running:
log.warning('{0!r} (pid={1}) timed out, killing it', self, self.pid)
killed[:] = [True]
self._kill_process_tree()
kill_thread = threading.Thread(target=kill, name=fmt('{0} watchdog (pid={1})', self, self.pid))
@ -483,22 +512,23 @@ class Session(object):
log.info('Waiting for {0} (pid={1}) to terminate', self, self.pid)
returncode = self.psutil_process.wait()
assert not killed, "wait_for_exit() timed out"
assert returncode == self.expected_returncode
self.is_running = False
self.wait_for_termination()
self.wait_for_termination(close=not killed)
def _kill_process_tree(self):
assert self.psutil_process is not None
procs = [self.psutil_process]
try:
procs += self.psutil_process.children(recursive=True)
except:
except Exception:
pass
for p in procs:
try:
p.kill()
except:
except Exception:
pass
def _listen(self):
@ -508,11 +538,35 @@ class Session(object):
self.server_socket.listen(0)
def accept_worker():
with self.lock:
server_socket = self.server_socket
if server_socket is None:
return
log.info('Listening for incoming connection from {0} on port {1}...', self, self.ptvsd_port)
self.socket, _ = self.server_socket.accept()
self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
try:
sock, _ = server_socket.accept()
except Exception:
log.exception()
return
log.info('Incoming connection from {0} accepted.', self)
self._setup_channel()
try:
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
with self.lock:
if self.server_socket is not None:
self.socket = sock
sock = None
self._setup_channel()
else:
# self.close() has been called concurrently.
pass
finally:
if sock is not None:
try:
sock.close()
except Exception:
pass
accept_thread = threading.Thread(target=accept_worker, name=fmt('{0} listener', self))
accept_thread.daemon = True
@ -641,14 +695,15 @@ class Session(object):
def _capture_output(self, pipe, name):
thread = threading.Thread(
target=lambda: self._capture_output_worker(pipe, name),
target=lambda: self._output_worker(pipe, name),
name=fmt("{0} {1}", self, name)
)
thread.daemon = True
thread.start()
self._output_capture_threads.append(thread)
self._output_worker_threads.append(thread)
def _capture_output_worker(self, pipe, name):
def _output_worker(self, pipe, name):
output_lines = self._output_lines[name]
while True:
try:
line = pipe.readline()
@ -657,13 +712,57 @@ class Session(object):
if line:
log.info("{0} {1}> {2}", self, name, line.rstrip())
self.output_data[name].append(line)
with self.lock:
output_lines.append(line)
else:
break
def _wait_for_remaining_output(self, timeout=None):
for thread in self._output_capture_threads:
thread.join(timeout)
for t in self._output_worker_threads:
t.join(timeout)
def _output(self, which, encoding, lines):
assert self.timeline.is_frozen
with self.lock:
result = list(self._output_lines[which])
if encoding is not None:
for i, s in enumerate(result):
result[i] = s.decode(encoding)
if not lines:
sep = b'' if encoding is None else u''
result = sep.join(result)
return result
def stdout(self, encoding=None):
"""Returns stdout captured from the debugged process, as a single string.
If encoding is None, returns bytes. Otherwise, returns unicode.
"""
return self._output("stdout", encoding, lines=False)
def stderr(self, encoding=None):
"""Returns stderr captured from the debugged process, as a single string.
If encoding is None, returns bytes. Otherwise, returns unicode.
"""
return self._output("stderr", encoding, lines=False)
def stdout_lines(self, encoding=None):
"""Returns stdout captured from the debugged process, as a list of lines.
If encoding is None, each line is bytes. Otherwise, each line is unicode.
"""
return self._output("stdout", encoding, lines=True)
def stderr_lines(self, encoding=None):
"""Returns stderr captured from the debugged process, as a list of lines.
If encoding is None, each line is bytes. Otherwise, each line is unicode.
"""
return self._output("stderr", encoding, lines=True)
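A hedged sketch of how a test might consume these new output helpers; the target and start method are illustrative, and wait_for_exit() is what freezes the timeline before the output is read:

from tests import debug

with debug.Session() as session:
    session.initialize(target=("code", 'print("hello")'), start_method="launch")
    session.start_debugging()
    session.wait_for_exit()                        # freezes the timeline

    assert session.stderr() == b""                 # raw bytes, whole stream
    out = session.stdout_lines(encoding="utf-8")   # decoded, split into lines
    assert any(line.startswith("hello") for line in out)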
def request_continue(self):
self.send_request('continue').wait_for_response(freeze=False)
@ -718,7 +817,7 @@ class Session(object):
child_session.rules = self.rules
child_session.connect()
child_session.handshake()
except:
except Exception:
child_session.close()
raise
else:
@ -728,12 +827,6 @@ class Session(object):
ptvsd_subprocess = self.wait_for_next(Event('ptvsd_subprocess'))
return self.connect_to_child_session(ptvsd_subprocess)
def get_stdout_as_string(self):
return b''.join(self.output_data['stdout'])
def get_stderr_as_string(self):
return b''.join(self.output_data['stderr'])
def connect_with_new_session(self, **kwargs):
ns = Session(start_method='attach_socket_import', ptvsd_port=self.ptvsd_port)
try:
@ -750,7 +843,7 @@ class Session(object):
ns.connect()
ns.connected.wait()
ns.handshake()
except:
except Exception:
ns.close()
else:
return ns
@ -803,6 +896,9 @@ class BackChannel(object):
self._established.wait()
return self._stream.read_json()
def expect(self, value):
assert self.receive() == value
def send(self, value):
self._established.wait()
self.session.timeline.unfreeze()
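Most of the test churn in this commit is the switch from session.read_json()/write_json() to an explicit backchannel object. A test-side sketch of that flow (code_to_debug is a hypothetical pyfile, and the payload strings are made up):

from tests import debug

with debug.Session() as session:
    backchannel = session.setup_backchannel()
    session.initialize(target=("file", code_to_debug), start_method="launch")
    session.start_debugging()

    assert backchannel.receive() == "ready"   # debuggee called backchannel.send("ready")
    backchannel.send("continue")              # debuggee reads it via backchannel.receive()
    backchannel.expect("done")                # shorthand for: assert backchannel.receive() == "done"

    session.wait_for_exit()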

View file

@ -11,11 +11,13 @@ from __future__ import absolute_import, print_function, unicode_literals
# builtin names like str, int etc without affecting the implementations in this
# file - some.* then provides shorthand aliases.
import itertools
import py.path
import re
import sys
from ptvsd.common import compat, fmt
from ptvsd.common.compat import unicode
from ptvsd.common.compat import unicode, xrange
import pydevd_file_utils
@ -45,6 +47,11 @@ class Some(object):
"""
return Either(self, pattern)
def such_that(self, condition):
"""Same pattern, but it only matches if condition() is true.
"""
return SuchThat(self, condition)
def in_range(self, start, stop):
"""Same pattern, but it only matches if the start <= value < stop.
"""
@ -229,29 +236,64 @@ class Path(Some):
"""
def __init__(self, path):
if isinstance(path, py.path.local):
path = path.strpath
if isinstance(path, bytes):
path = path.decode(sys.getfilesystemencoding())
assert isinstance(path, unicode)
self.path = path
def __repr__(self):
return fmt("some.path({0!r})", self.path)
def __eq__(self, other):
if not (isinstance(other, bytes) or isinstance(other, unicode)):
if isinstance(other, py.path.local):
other = other.strpath
if isinstance(other, unicode):
pass
elif isinstance(other, bytes):
other = other.decode(sys.getfilesystemencoding())
else:
return NotImplemented
left, right = self.path, other
# If there's a unicode/bytes mismatch, make both unicode.
if isinstance(left, unicode):
if not isinstance(right, unicode):
right = right.decode(sys.getfilesystemencoding())
elif isinstance(right, unicode):
right = right.encode(sys.getfilesystemencoding())
left = pydevd_file_utils.get_path_with_real_case(left)
right = pydevd_file_utils.get_path_with_real_case(right)
left = pydevd_file_utils.get_path_with_real_case(self.path)
right = pydevd_file_utils.get_path_with_real_case(other)
return left == right
class ListContaining(Some):
"""Matches any list that contains the specified subsequence of elements.
"""
def __init__(self, *items):
self.items = tuple(items)
def __repr__(self):
if not self.items:
return "[...]"
s = repr(list(self.items))
return fmt("[..., {0}, ...]", s[1:-1])
def __eq__(self, other):
if not isinstance(other, list):
return NotImplemented
items = self.items
if not items:
return True # every list contains an empty sequence
if len(items) == 1:
return self.items[0] in other
# Zip the other list with itself, shifting by one every time, to produce
# tuples of equal length with items - i.e. all potential subsequences. So,
# given other=[1, 2, 3, 4, 5] and items=(2, 3, 4), we want to get a list
# like [(1, 2, 3), (2, 3, 4), (3, 4, 5)] - and then search for items in it.
iters = [itertools.islice(other, i, None) for i in xrange(0, len(items))]
subseqs = compat.izip(*iters)
return any(subseq == items for subseq in subseqs)
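A standalone sketch (not part of the diff) of the shift-and-zip subsequence check that ListContaining.__eq__ uses; contains_subsequence is a hypothetical helper name:

import itertools

def contains_subsequence(seq, items):
    # Produce every len(items)-wide window of seq by zipping shifted copies,
    # then look for a window equal to items.
    items = tuple(items)
    if not items:
        return True
    iters = [itertools.islice(seq, i, None) for i in range(len(items))]
    return any(window == items for window in zip(*iters))

assert contains_subsequence([1, 2, 3, 4, 5], (2, 3, 4))    # contiguous run found
assert not contains_subsequence([1, 2, 3, 4, 5], (2, 4))   # gaps don't count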
class DictContaining(Some):
"""Matches any dict that contains the specified key-value pairs::

View file

@ -65,15 +65,17 @@ Usage::
__all__ = [
"bool",
"dap_id",
"dict",
"error",
"instanceof",
"int",
"list",
"number",
"path",
"source",
"str",
"such_that",
"thing",
"tuple",
]
import numbers
@ -83,7 +85,6 @@ from ptvsd.common.compat import builtins
from tests import patterns as some
such_that = some.SuchThat
object = some.Object()
thing = some.Thing()
instanceof = some.InstanceOf
@ -93,6 +94,7 @@ path = some.Path
bool = instanceof(builtins.bool)
number = instanceof(numbers.Real, "number")
int = instanceof(numbers.Integral, "int")
tuple = instanceof(builtins.tuple)
error = instanceof(Exception)
@ -106,6 +108,10 @@ else:
str.matching = some.StrMatching
list = instanceof(builtins.list)
list.containing = some.ListContaining
dict = instanceof(builtins.dict)
dict.containing = some.DictContaining
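A hedged sketch of how the new aliases compose in test assertions (the literal values are made up for illustration):

from tests.patterns import some

assert (1, 2) == some.tuple
assert [1, 2, 3, 4] == some.list.containing(2, 3)
assert {"value": "1", "name": "b['one']"} == some.dict.containing(
    {"name": some.str.matching(r".*one.*")}
)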

View file

@ -32,6 +32,9 @@ class JsonMemoryStream(object):
For output, values are appended to the supplied collection.
"""
json_decoder_factory = messaging.JsonIOStream.json_decoder_factory
json_encoder_factory = messaging.JsonIOStream.json_encoder_factory
def __init__(self, input, output, name="memory"):
self.name = name
self.input = iter(input)
@ -41,7 +44,7 @@ class JsonMemoryStream(object):
pass
def read_json(self, decoder=None):
decoder = decoder if decoder is not None else json.JSONDecoder()
decoder = decoder if decoder is not None else self.json_decoder_factory()
try:
value = next(self.input)
except StopIteration:
@ -49,7 +52,7 @@ class JsonMemoryStream(object):
return decoder.decode(json.dumps(value))
def write_json(self, value, encoder=None):
encoder = encoder if encoder is not None else json.JSONEncoder()
encoder = encoder if encoder is not None else self.json_encoder_factory()
value = json.loads(encoder.encode(value))
self.output.append(value)
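A round-trip sketch using the in-memory stream above; the message payloads are made up:

inp = [{"seq": 1, "type": "request", "command": "initialize"}]
out = []
stream = JsonMemoryStream(inp, out)

assert stream.read_json() == {"seq": 1, "type": "request", "command": "initialize"}
stream.write_json({"seq": 2, "type": "response", "success": True})
assert out == [{"seq": 2, "type": "response", "success": True}]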

View file

@ -15,7 +15,7 @@ from tests.timeline import Event
@pytest.mark.parametrize("is_attached", ["attachCheckOn", "attachCheckOff"])
@pytest.mark.parametrize("break_into", ["break", "pause"])
def test_attach(run_as, wait_for_attach, is_attached, break_into):
attach1_py = str(test_data / "attach" / "attach1.py")
attach1_py = test_data / "attach" / "attach1.py"
lines = code.get_marked_line_numbers(attach1_py)
with debug.Session() as session:
env = {
@ -29,6 +29,7 @@ def test_attach(run_as, wait_for_attach, is_attached, break_into):
if break_into == "break":
env["PTVSD_BREAK_INTO_DBG"] = "1"
backchannel = session.setup_backchannel()
session.initialize(
target=(run_as, attach1_py),
start_method="launch",
@ -38,18 +39,18 @@ def test_attach(run_as, wait_for_attach, is_attached, break_into):
session.start_debugging()
if wait_for_attach == "waitOn":
assert session.read_json() == "wait_for_attach"
assert backchannel.receive() == "wait_for_attach"
if is_attached == "attachCheckOn":
assert session.read_json() == "is_attached"
assert backchannel.receive() == "is_attached"
if break_into == "break":
assert session.read_json() == "break_into_debugger"
assert backchannel.receive() == "break_into_debugger"
hit = session.wait_for_stop()
assert lines["bp"] == hit.frames[0]["line"]
else:
# pause test
session.write_json("pause_test")
backchannel.send("pause_test")
session.send_request("pause").wait_for_response(freeze=False)
hit = session.wait_for_stop(reason="pause")
# Note: no longer asserting line as it can even stop on different files
@ -72,13 +73,14 @@ def test_reattach(pyfile, start_method, run_as):
ptvsd.break_into_debugger()
print("first") # @first
backchannel.write_json("continued")
backchannel.send("continued")
for _ in range(0, 100):
time.sleep(0.1)
ptvsd.break_into_debugger()
print("second") # @second
with debug.Session() as session:
backchannel = session.setup_backchannel()
session.initialize(
target=(run_as, code_to_debug),
start_method=start_method,
@ -91,7 +93,7 @@ def test_reattach(pyfile, start_method, run_as):
assert code_to_debug.lines["first"] == hit.frames[0]["line"]
session.send_request("disconnect").wait_for_response(freeze=False)
session.wait_for_disconnect()
assert session.read_json() == "continued"
assert backchannel.receive() == "continued"
# re-attach
with session.connect_with_new_session(target=(run_as, code_to_debug)) as session2:

View file

@ -20,7 +20,7 @@ BP_TEST_ROOT = test_data / "bp"
def test_path_with_ampersand(start_method, run_as):
test_py = str(BP_TEST_ROOT / "a&b" / "test.py")
test_py = BP_TEST_ROOT / "a&b" / "test.py"
lines = code.get_marked_line_numbers(test_py)
with debug.Session(start_method) as session:
@ -56,7 +56,7 @@ def test_path_with_unicode(start_method, run_as):
assert hit.frames[0]["source"]["path"] == some.path(test_py)
assert "ಏನಾದರೂ_ಮಾಡು" == hit.frames[0]["name"]
session.send_continue()
session.request_continue()
session.wait_for_exit()
@ -126,10 +126,10 @@ def test_conditional_breakpoint(pyfile, start_method, run_as, condition_key):
)
]
session.send_continue()
session.request_continue()
for i in range(1, hits):
session.wait_for_stop()
session.send_continue()
session.request_continue()
session.wait_for_exit()
@ -159,12 +159,12 @@ def test_crossfile_breakpoint(pyfile, start_method, run_as):
assert script2.lines["bp"] == hit.frames[0]["line"]
assert hit.frames[0]["source"]["path"] == some.path(script2)
session.send_continue()
session.request_continue()
hit = session.wait_for_stop()
assert script1.lines["bp"] == hit.frames[0]["line"]
assert hit.frames[0]["source"]["path"] == some.path(script1)
session.send_continue()
session.request_continue()
session.wait_for_exit()
@ -241,7 +241,7 @@ def test_log_point(pyfile, start_method, run_as):
hit = session.wait_for_stop()
assert lines["end"] == hit.frames[0]["line"]
session.send_continue()
session.request_continue()
session.wait_for_exit()
assert session.get_stderr_as_string() == b""
@ -309,12 +309,12 @@ def test_condition_with_log_point(pyfile, start_method, run_as):
)
]
session.send_continue()
session.request_continue()
# Breakpoint at the end just to make sure we get all output events.
hit = session.wait_for_stop()
assert lines["end"] == hit.frames[0]["line"]
session.send_continue()
session.request_continue()
session.wait_for_exit()
assert session.get_stderr_as_string() == b""
@ -332,7 +332,7 @@ def test_condition_with_log_point(pyfile, start_method, run_as):
def test_package_launch():
cwd = test_data / "testpkgs"
test_py = os.path.join(cwd, "pkg1", "__main__.py")
test_py = cwd / "pkg1" / "__main__.py"
lines = code.get_marked_line_numbers(test_py)
with debug.Session() as session:
@ -343,7 +343,7 @@ def test_package_launch():
hit = session.wait_for_stop()
assert lines["two"] == hit.frames[0]["line"]
session.send_continue()
session.request_continue()
session.wait_for_exit()
@ -354,10 +354,11 @@ def test_add_and_remove_breakpoint(pyfile, start_method, run_as):
for i in range(0, 10):
print(i) # @bp
backchannel.read_json()
backchannel.receive()
lines = code_to_debug.lines
with debug.Session() as session:
backchannel = session.setup_backchannel()
session.initialize(
target=(run_as, code_to_debug),
start_method=start_method,
@ -371,12 +372,12 @@ def test_add_and_remove_breakpoint(pyfile, start_method, run_as):
# remove breakpoints in file
session.set_breakpoints(code_to_debug, [])
session.send_continue()
session.request_continue()
session.wait_for_next(
Event("output", some.dict.containing({"category": "stdout", "output": "9"}))
)
session.write_json("done")
backchannel.send("done")
session.wait_for_exit()
output = session.all_occurrences_of(

View file

@ -69,7 +69,7 @@ def test_completions_scope(pyfile, bp_label, start_method, run_as):
).wait_for_response()
targets = resp_completions.body["targets"]
session.send_continue()
session.request_continue()
targets.sort(key=lambda t: t["label"])
expected.sort(key=lambda t: t["label"])
@ -136,5 +136,5 @@ def test_completions_cases(pyfile, start_method, run_as):
).wait_for_response()
assert "Wrong ID sent from the client:" in str(error)
session.send_continue()
session.request_continue()
session.wait_for_exit()

View file

@ -20,9 +20,10 @@ def test_continue_on_disconnect_for_attach(pyfile, start_method, run_as):
def code_to_debug():
from debug_me import backchannel
backchannel.write_json("continued") # @bp
backchannel.send("continued") # @bp
with debug.Session() as session:
backchannel = session.setup_backchannel()
session.initialize(
target=(run_as, code_to_debug),
start_method=start_method,
@ -35,7 +36,7 @@ def test_continue_on_disconnect_for_attach(pyfile, start_method, run_as):
assert hit.frames[0]["line"] == code_to_debug.lines["bp"]
session.send_request("disconnect").wait_for_response()
session.wait_for_disconnect()
assert "continued" == session.read_json()
assert "continued" == backchannel.receive()
@pytest.mark.parametrize("start_method", ["launch"])

View file

@ -80,7 +80,7 @@ def test_django_breakpoint_no_multiproc(start_method, bp_target):
}
]
session.send_continue()
session.request_continue()
assert bp_var_content in home_request.response_text()
session.wait_for_exit()
@ -150,11 +150,11 @@ def test_django_template_exception_no_multiproc(start_method):
}
)
session.send_continue()
session.request_continue()
# And a second time when the exception reaches the user code.
hit = session.wait_for_stop(reason="exception")
session.send_continue()
session.request_continue()
# ignore response for exception tests
web_request.wait_for_response()
@ -239,7 +239,7 @@ def test_django_exception_no_multiproc(ex_type, start_method):
"column": 1,
}
session.send_continue()
session.request_continue()
# ignore response for exception tests
web_request.wait_for_response()
@ -337,7 +337,7 @@ def test_django_breakpoint_multiproc(start_method):
}
]
child_session.send_continue()
child_session.request_continue()
web_content = web_request.wait_for_response()
assert web_content.find(bp_var_content) != -1

View file

@ -54,14 +54,14 @@ def test_variables_and_evaluate(pyfile, start_method, run_as):
assert b_variables[0] == {
"type": "int",
"value": "1",
"name": some.str.such_that(lambda x: x.find("one") > 0),
"name": some.str.matching(r".*one.*"),
"evaluateName": "b['one']",
"variablesReference": 0,
}
assert b_variables[1] == {
"type": "int",
"value": "2",
"name": some.str.such_that(lambda x: x.find("two") > 0),
"name": some.str.matching(r".*two.*"),
"evaluateName": "b['two']",
"variablesReference": 0,
}
@ -99,7 +99,7 @@ def test_variables_and_evaluate(pyfile, start_method, run_as):
{"type": "int", "result": "2"}
)
session.send_continue()
session.request_continue()
session.wait_for_exit()
@ -110,9 +110,10 @@ def test_set_variable(pyfile, start_method, run_as):
a = 1
ptvsd.break_into_debugger()
backchannel.write_json(a)
backchannel.send(a)
with debug.Session() as session:
backchannel = session.setup_backchannel()
session.initialize(
target=(run_as, code_to_debug),
start_method=start_method,
@ -155,9 +156,9 @@ def test_set_variable(pyfile, start_method, run_as):
{"type": "int", "value": "1000"}
)
session.send_continue()
session.request_continue()
assert session.read_json() == 1000
assert backchannel.receive() == 1000
session.wait_for_exit()
@ -247,7 +248,7 @@ def test_variable_sort(pyfile, start_method, run_as):
# NOTE: this is commented out due to sorting bug #213
# assert variable_names[:3] == ['1', '2', '10']
session.send_continue()
session.request_continue()
session.wait_for_exit()
@ -273,7 +274,7 @@ def test_return_values(pyfile, start_method, run_as):
"value": "'did something'",
"type": "str",
"presentationHint": some.dict.containing(
{"attributes": some.str.such_that(lambda x: "readOnly" in x)}
{"attributes": some.list.containing("readOnly")}
),
}
)
@ -284,7 +285,7 @@ def test_return_values(pyfile, start_method, run_as):
"value": "'did more things'",
"type": "str",
"presentationHint": some.dict.containing(
{"attributes": some.str.such_that(lambda x: "readOnly" in x)}
{"attributes": some.list.containing("readOnly")}
),
}
)
@ -370,7 +371,7 @@ def test_unicode(pyfile, start_method, run_as):
else:
assert resp_eval.body == some.dict.containing({"type": "SyntaxError"})
session.send_continue()
session.request_continue()
session.wait_for_exit()
@ -595,5 +596,5 @@ def test_hex_numbers(pyfile, start_method, run_as):
},
]
session.send_continue()
session.request_continue()
session.wait_for_exit()

View file

@ -11,6 +11,9 @@ from tests.patterns import some
from tests.timeline import Event
str_matching_ArithmeticError = some.str.matching(r"($|.*\.)ArithmeticError")
@pytest.mark.parametrize("raised", ["raisedOn", "raisedOff"])
@pytest.mark.parametrize("uncaught", ["uncaughtOn", "uncaughtOff"])
def test_vsc_exception_options_raise_with_except(
@ -41,16 +44,12 @@ def test_vsc_exception_options_raise_with_except(
expected = some.dict.containing(
{
"exceptionId": some.str.such_that(
lambda s: s.endswith("ArithmeticError")
),
"exceptionId": str_matching_ArithmeticError,
"description": "bad code",
"breakMode": "always" if raised == "raisedOn" else "unhandled",
"details": some.dict.containing(
{
"typeName": some.str.such_that(
lambda s: s.endswith("ArithmeticError")
),
"typeName": str_matching_ArithmeticError,
"message": "bad code",
"source": some.path(code_to_debug),
}
@ -61,7 +60,7 @@ def test_vsc_exception_options_raise_with_except(
if raised == "raisedOn":
hit = session.wait_for_stop(
reason="exception",
text=some.str.such_that(lambda s: s.endswith("ArithmeticError")),
text=str_matching_ArithmeticError,
description="bad code",
)
assert ex_line == hit.frames[0]["line"]
@ -71,7 +70,7 @@ def test_vsc_exception_options_raise_with_except(
).wait_for_response()
assert resp_exc_info.body == expected
session.send_continue()
session.request_continue()
# uncaught should not cause a stop here, since the exception is caught
@ -110,16 +109,12 @@ def test_vsc_exception_options_raise_without_except(
expected = some.dict.containing(
{
"exceptionId": some.str.such_that(
lambda s: s.endswith("ArithmeticError")
),
"exceptionId": str_matching_ArithmeticError,
"description": "bad code",
"breakMode": "always" if raised == "raisedOn" else "unhandled",
"details": some.dict.containing(
{
"typeName": some.str.such_that(
lambda s: s.endswith("ArithmeticError")
),
"typeName": str_matching_ArithmeticError,
"message": "bad code",
"source": some.path(code_to_debug),
}
@ -136,14 +131,14 @@ def test_vsc_exception_options_raise_without_except(
).wait_for_response()
assert resp_exc_info.body == expected
session.send_continue()
session.request_continue()
# NOTE: debugger stops at each frame if raised and is uncaught
# This behavior can be changed by updating 'notify_on_handled_exceptions'
# setting we send to pydevd to notify only once. In our test code, we have
# two frames, hence two stops.
session.wait_for_stop(reason="exception")
session.send_continue()
session.request_continue()
if uncaught == "uncaughtOn":
hit = session.wait_for_stop(reason="exception")
@ -155,16 +150,12 @@ def test_vsc_exception_options_raise_without_except(
expected = some.dict.containing(
{
"exceptionId": some.str.such_that(
lambda s: s.endswith("ArithmeticError")
),
"exceptionId": str_matching_ArithmeticError,
"description": "bad code",
"breakMode": "unhandled", # Only difference from previous expected is breakMode.
"details": some.dict.containing(
{
"typeName": some.str.such_that(
lambda s: s.endswith("ArithmeticError")
),
"typeName": str_matching_ArithmeticError,
"message": "bad code",
"source": some.path(code_to_debug),
}
@ -173,7 +164,7 @@ def test_vsc_exception_options_raise_without_except(
)
assert resp_exc_info.body == expected
session.send_continue()
session.request_continue()
session.wait_for_exit()
@ -223,11 +214,11 @@ def test_systemexit(pyfile, start_method, run_as, raised, uncaught, zero, exit_c
if raised and (zero or exit_code != 0):
hit = session.wait_for_stop(reason="exception")
assert hit.frames[0]["line"] == line_numbers["handled"]
session.send_continue()
session.request_continue()
hit = session.wait_for_stop(reason="exception")
assert hit.frames[0]["line"] == line_numbers["unhandled"]
session.send_continue()
session.request_continue()
# When breaking on uncaught exceptions, we'll stop on the second line,
# unless it's SystemExit(0) and we asked to ignore that.
@ -238,7 +229,7 @@ def test_systemexit(pyfile, start_method, run_as, raised, uncaught, zero, exit_c
if uncaught and (zero or exit_code != 0):
hit = session.wait_for_stop(reason="exception")
assert hit.frames[0]["line"] == line_numbers["unhandled"]
session.send_continue()
session.request_continue()
session.wait_for_exit()
@ -326,7 +317,7 @@ def test_raise_exception_options(pyfile, start_method, run_as, exceptions, break
hit = session.wait_for_stop(reason="exception")
assert hit.frames[0]["source"]["path"].endswith("code_to_debug.py")
assert hit.frames[0]["line"] == code_to_debug.lines[expected_exception]
session.send_continue()
session.request_continue()
session.wait_for_exit()
@ -357,7 +348,7 @@ def test_success_exitcodes(pyfile, start_method, run_as, exit_code):
if exit_code == 0:
session.wait_for_stop(reason="exception")
session.send_continue()
session.request_continue()
session.wait_for_exit()
@ -414,12 +405,12 @@ def test_exception_stack(pyfile, start_method, run_as, max_frames):
expected = some.dict.containing(
{
"exceptionId": some.matching("ArithmeticError"),
"exceptionId": some.str.matching(ArithmeticError.__name__),
"description": "bad code",
"breakMode": "unhandled",
"details": some.dict.containing(
{
"typeName": some.matching("ArithmeticError"),
"typeName": some.str.matching(ArithmeticError.__name__),
"message": "bad code",
"source": some.path(code_to_debug),
}
@ -431,6 +422,6 @@ def test_exception_stack(pyfile, start_method, run_as, max_frames):
stack_line_count = len(stack_str.split("\n"))
assert min_expected_lines <= stack_line_count <= max_expected_lines
session.send_continue()
session.request_continue()
session.wait_for_exit()

View file

@ -7,7 +7,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import os.path
import pytest
from tests import debug, test_data
from tests import debug, log, test_data
from tests.patterns import some
@ -38,11 +38,12 @@ def test_exceptions_and_exclude_rules(
raise AssertionError("Unexpected exception_type: %s" % (exception_type,))
if scenario == "exclude_by_name":
rules = [{"path": "**/" + os.path.basename(code_to_debug), "include": False}]
rules = [{"path": "**/" + code_to_debug.basename, "include": False}]
elif scenario == "exclude_by_dir":
rules = [{"path": os.path.dirname(code_to_debug), "include": False}]
rules = [{"path": code_to_debug.dirname, "include": False}]
else:
raise AssertionError("Unexpected scenario: %s" % (scenario,))
pytest.fail(scenario)
log.info("Rules: {0!j}", rules)
with debug.Session() as session:
session.initialize(
@ -70,7 +71,7 @@ def test_exceptions_and_partial_exclude_rules(pyfile, start_method, run_as, scen
from debug_me import backchannel
import sys
json = backchannel.read_json()
json = backchannel.receive()
call_me_back_dir = json["call_me_back_dir"]
sys.path.append(call_me_back_dir)
@ -86,17 +87,18 @@ def test_exceptions_and_partial_exclude_rules(pyfile, start_method, run_as, scen
call_me_back_dir = test_data / "call_me_back"
if scenario == "exclude_code_to_debug":
rules = [{"path": "**/" + os.path.basename(code_to_debug), "include": False}]
rules = [{"path": "**/" + code_to_debug.basename, "include": False}]
elif scenario == "exclude_callback_dir":
rules = [{"path": call_me_back_dir, "include": False}]
else:
raise AssertionError("Unexpected scenario: %s" % (scenario,))
pytest.fail(scenario)
log.info("Rules: {0!j}", rules)
with debug.Session() as session:
backchannel = session.setup_backchannel()
session.initialize(
target=(run_as, code_to_debug),
start_method=start_method,
use_backchannel=True,
rules=rules,
)
# TODO: The process returncode doesn't match the one returned from the DAP.
@ -108,7 +110,7 @@ def test_exceptions_and_partial_exclude_rules(pyfile, start_method, run_as, scen
"setExceptionBreakpoints", {"filters": filters}
).wait_for_response()
session.start_debugging()
session.write_json({"call_me_back_dir": call_me_back_dir})
backchannel.send({"call_me_back_dir": call_me_back_dir})
if scenario == "exclude_code_to_debug":
# Stop at handled
@ -135,7 +137,7 @@ def test_exceptions_and_partial_exclude_rules(pyfile, start_method, run_as, scen
# })
# })
# 'continue' should terminate the debuggee
session.send_continue()
session.request_continue()
# Note: does not stop at unhandled exception because raise was in excluded file.
@ -189,8 +191,8 @@ def test_exceptions_and_partial_exclude_rules(pyfile, start_method, run_as, scen
"source": some.dict.containing({"path": some.path(code_to_debug)}),
}
)
session.send_continue()
session.request_continue()
else:
raise AssertionError("Unexpected scenario: %s" % (scenario,))
pytest.fail(scenario)
session.wait_for_exit()

View file

@ -98,7 +98,7 @@ def test_flask_breakpoint_no_multiproc(bp_target, start_method):
}
]
session.send_continue()
session.request_continue()
assert bp_var_content in home_request.response_text()
session.wait_for_exit()
@ -164,7 +164,7 @@ def test_flask_template_exception_no_multiproc(start_method):
}
)
session.send_continue()
session.request_continue()
# ignore response for exception tests
web_request.wait_for_response()
@ -234,7 +234,7 @@ def test_flask_exception_no_multiproc(ex_type, start_method):
"column": 1,
}
session.send_continue()
session.request_continue()
# ignore response for exception tests
web_request.wait_for_response()
@ -320,7 +320,7 @@ def test_flask_breakpoint_multiproc(start_method):
}
]
child_session.send_continue()
child_session.request_continue()
assert bp_var_content in web_request.response_text()
child_session.wait_for_termination()

View file

@ -56,6 +56,6 @@ def test_justmycode_frames(pyfile, start_method, run_as, jmc):
assert hit2.frames[0]["source"]["path"] != some.path(code_to_debug)
# 'continue' should terminate the debuggee
session.send_continue()
session.request_continue()
session.wait_for_exit()

View file

@ -59,17 +59,18 @@ def test_multiprocessing(pyfile, start_method, run_as):
p = multiprocessing.Process(target=child, args=(q,))
p.start()
print("child spawned")
backchannel.write_json(p.pid)
backchannel.send(p.pid)
q.put(1)
assert backchannel.read_json() == "continue"
assert backchannel.receive() == "continue"
q.put(2)
p.join()
assert q.get() == 4
q.close()
backchannel.write_json("done")
backchannel.send("done")
with debug.Session() as parent_session:
parent_backchannel = parent_session.setup_backchannel()
parent_session.initialize(
multiprocess=True,
target=(run_as, code_to_debug),
@ -84,7 +85,7 @@ def test_multiprocessing(pyfile, start_method, run_as):
root_process, = parent_session.all_occurrences_of(Event("process"))
root_pid = int(root_process.body["systemProcessId"])
child_pid = parent_session.read_json()
child_pid = parent_backchannel.receive()
child_subprocess = parent_session.wait_for_next(Event("ptvsd_subprocess"))
assert child_subprocess == Event(
@ -132,12 +133,12 @@ def test_multiprocessing(pyfile, start_method, run_as):
) as grandchild_session:
grandchild_session.start_debugging()
parent_session.write_json("continue")
parent_backchannel.send("continue")
grandchild_session.wait_for_termination()
child_session.wait_for_termination()
assert parent_session.read_json() == "done"
assert parent_backchannel.receive() == "done"
parent_session.wait_for_exit()
@ -153,7 +154,7 @@ def test_subprocess(pyfile, start_method, run_as):
import backchannel
import debug_me # noqa
backchannel.write_json(sys.argv)
backchannel.send(sys.argv)
@pyfile
def parent():
@ -175,6 +176,7 @@ def test_subprocess(pyfile, start_method, run_as):
with debug.Session() as parent_session:
parent_session.program_args += [child]
parent_backchannel = parent_session.setup_backchannel()
parent_session.initialize(
multiprocess=True,
target=(run_as, parent),
@ -210,7 +212,7 @@ def test_subprocess(pyfile, start_method, run_as):
with parent_session.connect_to_child_session(child_subprocess) as child_session:
child_session.start_debugging()
child_argv = parent_session.read_json()
child_argv = parent_backchannel.receive()
assert child_argv == [child, "--arg1", "--arg2", "--arg3"]
child_session.wait_for_termination()
@ -247,10 +249,11 @@ def test_autokill(pyfile, start_method, run_as):
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
backchannel.read_json()
backchannel.receive()
with debug.Session() as parent_session:
parent_session.program_args += [child]
parent_backchannel = parent_session.setup_backchannel()
parent_session.initialize(
multiprocess=True,
target=(run_as, parent),
@ -270,7 +273,7 @@ def test_autokill(pyfile, start_method, run_as):
else:
# In attach scenario, just let the parent process run to completion.
parent_session.expected_returncode = 0
parent_session.write_json(None)
parent_backchannel.send(None)
child_session.wait_for_termination()
parent_session.wait_for_exit()
@ -316,12 +319,13 @@ def test_argv_quoting(pyfile, start_method, run_as):
from args import args as expected_args
backchannel.write_json(expected_args)
backchannel.send(expected_args)
actual_args = sys.argv[1:]
backchannel.write_json(actual_args)
backchannel.send(actual_args)
with debug.Session() as session:
backchannel = session.setup_backchannel()
session.initialize(
target=(run_as, parent),
start_method=start_method,
@ -331,8 +335,8 @@ def test_argv_quoting(pyfile, start_method, run_as):
session.start_debugging()
expected_args = session.read_json()
actual_args = session.read_json()
expected_args = backchannel.receive()
actual_args = backchannel.receive()
assert expected_args == actual_args
session.wait_for_exit()

View file

@ -44,7 +44,7 @@ def test_with_tab_in_output(pyfile, start_method, run_as):
# Breakpoint at the end just to make sure we get all output events.
session.wait_for_stop()
session.send_continue()
session.request_continue()
session.wait_for_exit()
output = session.all_occurrences_of(
@ -76,7 +76,7 @@ def test_redirect_output(pyfile, start_method, run_as, redirect):
# Breakpoint at the end just to make sure we get all output events.
session.wait_for_stop()
session.send_continue()
session.request_continue()
session.wait_for_exit()
output = session.all_occurrences_of(

View file

@ -29,10 +29,11 @@ def test_client_ide_from_path_mapping_linux_backend(
from debug_me import backchannel
import pydevd_file_utils
backchannel.write_json({"ide_os": pydevd_file_utils._ide_os})
backchannel.send({"ide_os": pydevd_file_utils._ide_os})
print("done") # @break_here
with debug.Session() as session:
backchannel = session.setup_backchannel()
session.initialize(
target=(run_as, code_to_debug),
start_method=start_method,
@ -56,10 +57,10 @@ def test_client_ide_from_path_mapping_linux_backend(
code_to_debug
)
json_read = session.read_json()
json_read = backchannel.receive()
assert json_read == {"ide_os": "WINDOWS"}
session.send_continue()
session.request_continue()
session.wait_for_exit()
@ -69,7 +70,7 @@ def test_with_dot_remote_root(pyfile, tmpdir, start_method, run_as):
from debug_me import backchannel
import os
backchannel.write_json(os.path.abspath(__file__))
backchannel.send(os.path.abspath(__file__))
print("done") # @bp
path_local = tmpdir.mkdir("local").join("code_to_debug.py").strpath
@ -82,6 +83,7 @@ def test_with_dot_remote_root(pyfile, tmpdir, start_method, run_as):
shutil.copyfile(code_to_debug, path_remote)
with debug.Session() as session:
backchannel = session.setup_backchannel()
session.initialize(
target=(run_as, path_remote),
start_method=start_method,
@ -96,10 +98,10 @@ def test_with_dot_remote_root(pyfile, tmpdir, start_method, run_as):
print("Frames: " + str(hit.frames))
assert hit.frames[0]["source"]["path"] == some.path(path_local)
remote_code_path = session.read_json()
remote_code_path = backchannel.receive()
assert path_remote == some.path(remote_code_path)
session.send_continue()
session.request_continue()
session.wait_for_exit()
@ -110,7 +112,7 @@ def test_with_path_mappings(pyfile, tmpdir, start_method, run_as):
import os
import sys
json = backchannel.read_json()
json = backchannel.receive()
call_me_back_dir = json["call_me_back_dir"]
sys.path.append(call_me_back_dir)
@ -119,7 +121,7 @@ def test_with_path_mappings(pyfile, tmpdir, start_method, run_as):
def call_func():
print("break here") # @bp
backchannel.write_json(os.path.abspath(__file__))
backchannel.send(os.path.abspath(__file__))
call_me_back.call_me_back(call_func)
print("done")
@ -135,6 +137,7 @@ def test_with_path_mappings(pyfile, tmpdir, start_method, run_as):
call_me_back_dir = test_data / "call_me_back"
with debug.Session() as session:
backchannel = session.setup_backchannel()
session.initialize(
target=(run_as, path_remote),
start_method=start_method,
@ -143,7 +146,7 @@ def test_with_path_mappings(pyfile, tmpdir, start_method, run_as):
)
session.set_breakpoints(path_remote, [code_to_debug.lines["bp"]])
session.start_debugging()
session.write_json({"call_me_back_dir": call_me_back_dir})
backchannel.send({"call_me_back_dir": call_me_back_dir})
hit = session.wait_for_stop("breakpoint")
assert hit.frames[0]["source"]["path"] == some.path(path_local)
@ -168,8 +171,8 @@ def test_with_path_mappings(pyfile, tmpdir, start_method, run_as):
).wait_for_response()
assert "def call_me_back(callback):" in (resp_source.body["content"])
remote_code_path = session.read_json()
remote_code_path = backchannel.receive()
assert path_remote == some.path(remote_code_path)
session.send_continue()
session.request_continue()
session.wait_for_exit()

View file

@ -23,7 +23,7 @@ def test_run(pyfile, start_method, run_as):
import sys
print("begin")
assert backchannel.receive() == "continue"
backchannel.wait_for("continue")
backchannel.send(path.abspath(sys.modules["ptvsd"].__file__))
print("end")
@ -37,24 +37,25 @@ def test_run(pyfile, start_method, run_as):
expected_name = (
"-c"
if run_as == "code"
else some.str.matching(re.escape(code_to_debug) + r"(c|o)?$")
else some.str.matching(re.escape(code_to_debug.strpath) + r"(c|o)?$")
)
assert process_event == Event(
"process", some.dict.containing({"name": expected_name})
)
backchannel.send("continue")
ptvsd_path = backchannel.receive()
expected_ptvsd_path = path.abspath(ptvsd.__file__)
assert re.match(re.escape(expected_ptvsd_path) + r"(c|o)?$", ptvsd_path)
backchannel.expect(some.str.matching(
re.escape(expected_ptvsd_path) + r"(c|o)?$"
))
session.wait_for_exit()
def test_run_submodule():
cwd = str(test_data / "testpkgs")
with debug.Session("launch") as session:
session.initialize(target=("module", "pkg1.sub"), cwd=cwd)
session.initialize(target=("module", "pkg1.sub"), cwd=test_data / "testpkgs")
session.start_debugging()
session.wait_for_next(
Event(

View file

@ -16,9 +16,10 @@ def test_set_expression(pyfile, start_method, run_as):
a = 1
ptvsd.break_into_debugger()
backchannel.write_json(a)
backchannel.send(a)
with debug.Session() as session:
backchannel = session.setup_backchannel()
session.initialize(
target=(run_as, code_to_debug),
start_method=start_method,
@ -58,8 +59,8 @@ def test_set_expression(pyfile, start_method, run_as):
{"type": "int", "value": "1000"}
)
session.send_continue()
session.request_continue()
assert session.read_json() == 1000
assert backchannel.receive() == 1000
session.wait_for_exit()

View file

@ -15,7 +15,7 @@ from tests.patterns import some
@pytest.mark.parametrize("start_method", ["launch"])
@pytest.mark.skipif(
sys.version_info < (3, 0) and platform.system() == "Windows",
reason="On Win32 Python2.7, unable to send key strokes to test.",
reason="On Windows + Python 2, unable to send key strokes to test.",
)
def test_wait_on_normal_exit_enabled(pyfile, start_method, run_as):
@pyfile
@ -24,9 +24,10 @@ def test_wait_on_normal_exit_enabled(pyfile, start_method, run_as):
import ptvsd
ptvsd.break_into_debugger()
backchannel.write_json("done")
backchannel.send("done")
with debug.Session() as session:
backchannel = session.setup_backchannel()
session.initialize(
target=(run_as, code_to_debug),
start_method=start_method,
@ -36,26 +37,21 @@ def test_wait_on_normal_exit_enabled(pyfile, start_method, run_as):
session.start_debugging()
session.wait_for_stop()
session.send_continue()
session.request_continue()
session.expected_returncode = some.int
assert session.read_json() == "done"
assert backchannel.receive() == "done"
session.process.stdin.write(b" \r\n")
session.wait_for_exit()
decoded = "\n".join(
(x.decode("utf-8") if isinstance(x, bytes) else x)
for x in session.output_data["OUT"]
)
assert "Press" in decoded
assert any(s.startswith("Press") for s in session.stdout_lines("utf-8"))
@pytest.mark.parametrize("start_method", ["launch"])
@pytest.mark.skipif(
sys.version_info < (3, 0) and platform.system() == "Windows",
reason="On windows py2.7 unable to send key strokes to test.",
reason="On Windows + Python 2, unable to send key strokes to test.",
)
def test_wait_on_abnormal_exit_enabled(pyfile, start_method, run_as):
@pyfile
@ -65,10 +61,11 @@ def test_wait_on_abnormal_exit_enabled(pyfile, start_method, run_as):
import ptvsd
ptvsd.break_into_debugger()
backchannel.write_json("done")
backchannel.send("done")
sys.exit(12345)
with debug.Session() as session:
backchannel = session.setup_backchannel()
session.initialize(
target=(run_as, code_to_debug),
start_method=start_method,
@ -78,22 +75,15 @@ def test_wait_on_abnormal_exit_enabled(pyfile, start_method, run_as):
session.start_debugging()
session.wait_for_stop()
session.send_continue()
session.request_continue()
session.expected_returncode = some.int
assert session.read_json() == "done"
assert backchannel.receive() == "done"
session.process.stdin.write(b" \r\n")
session.wait_for_exit()
def _decode(text):
if isinstance(text, bytes):
return text.decode("utf-8")
return text
assert any(
l for l in session.output_data["OUT"] if _decode(l).startswith("Press")
)
assert any(s.startswith("Press") for s in session.stdout_lines("utf-8"))
@pytest.mark.parametrize("start_method", ["launch"])
@ -104,9 +94,10 @@ def test_exit_normally_with_wait_on_abnormal_exit_enabled(pyfile, start_method,
import ptvsd
ptvsd.break_into_debugger()
backchannel.write_json("done")
backchannel.send("done")
with debug.Session() as session:
backchannel = session.setup_backchannel()
session.initialize(
target=(run_as, code_to_debug),
start_method=start_method,
@ -116,10 +107,10 @@ def test_exit_normally_with_wait_on_abnormal_exit_enabled(pyfile, start_method,
session.start_debugging()
session.wait_for_stop()
session.send_continue()
session.request_continue()
session.wait_for_termination()
assert session.read_json() == "done"
assert backchannel.receive() == "done"
session.wait_for_exit()

View file

@ -17,9 +17,10 @@ def test_stop_on_entry(pyfile, start_method, run_as, with_bp):
def code_to_debug():
from debug_me import backchannel # @bp
backchannel.write_json("done")
backchannel.send("done")
with debug.Session() as session:
backchannel = session.setup_backchannel()
session.initialize(
target=(run_as, code_to_debug),
start_method=start_method,
@ -45,9 +46,9 @@ def test_stop_on_entry(pyfile, start_method, run_as, with_bp):
assert hit.frames[0]["line"] == 1
assert hit.frames[0]["source"]["path"] == some.path(code_to_debug)
session.send_continue()
session.request_continue()
session.wait_for_termination()
assert session.read_json() == "done"
assert backchannel.receive() == "done"
session.wait_for_exit()

View file

@ -50,7 +50,7 @@ def test_thread_count(pyfile, start_method, run_as, count):
assert len(resp_threads.body["threads"]) == count
session.send_continue()
session.request_continue()
session.wait_for_exit()
@ -111,5 +111,5 @@ def test_debug_this_thread(pyfile, start_method, run_as):
session.start_debugging()
session.wait_for_stop()
session.send_continue()
session.request_continue()
session.wait_for_exit()

View file

@ -32,7 +32,7 @@ def test_stack_format(pyfile, start_method, run_as, module, line):
start_method=start_method,
ignore_unobserved=[Event("stopped")],
)
session.set_breakpoints(test_module, [code_to_debug.lines["bp"]])
session.set_breakpoints(test_module, [test_module.lines["bp"]])
session.start_debugging()
hit = session.wait_for_stop()
@ -47,12 +47,12 @@ def test_stack_format(pyfile, start_method, run_as, module, line):
frames = resp_stacktrace.body["stackFrames"]
assert line == (
frames[0]["name"].find(": " + str(code_to_debug.lines["bp"])) > -1
frames[0]["name"].find(": " + str(test_module.lines["bp"])) > -1
)
assert module == (frames[0]["name"].find("test_module") > -1)
session.send_continue()
session.request_continue()
session.wait_for_exit()
@ -96,5 +96,5 @@ def test_module_events(pyfile, start_method, run_as):
("__main__", some.path(test_code)),
]
session.send_continue()
session.request_continue()
session.wait_for_exit()

View file

@ -12,7 +12,7 @@ import tempfile
import threading
import types
from ptvsd.common import timestamp
from ptvsd.common import compat, timestamp
from tests import code, pydevd_log
__all__ = ['run_as', 'start_method', 'with_pydevd_log', 'daemon', 'pyfile']
@ -126,7 +126,7 @@ def pyfile(request, tmpdir):
it cannot reuse top-level module imports - it must import all the modules
that it uses locally. When linter complains, use #noqa.
The returned object is a subclass of str that has an additional attribute "lines".
Returns a py.path.local instance that has the additional attribute "lines".
After the source is written to disk, tests.code.get_marked_line_numbers() is
invoked on the resulting file to compute the value of that attribute.
"""
@ -157,16 +157,15 @@ def pyfile(request, tmpdir):
line = source[0]
indent = len(line) - len(line.lstrip())
source = [l[indent:] if l.strip() else '\n' for l in source]
source = ''.join(source)
# Write it to file.
source = ''.join(source)
tmpfile = tmpdir.join(name + '.py')
tmpfile = tmpdir / (name + '.py')
tmpfile.strpath = compat.filename(tmpfile.strpath)
assert not tmpfile.check()
tmpfile.write(source)
class PyFile(str):
lines = code.get_marked_line_numbers(tmpfile.strpath)
return PyFile(tmpfile.strpath)
tmpfile.lines = code.get_marked_line_numbers(tmpfile)
return tmpfile
return factory
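A sketch of what the updated fixture gives a test; the test body and marker below are hypothetical:

def test_example(pyfile, start_method, run_as):
    @pyfile
    def code_to_debug():
        import debug_me  # noqa
        print("stop here")  # @bp

    assert code_to_debug.check()          # py.path.local API: the file exists on disk
    bp_line = code_to_debug.lines["bp"]   # marker name -> 1-based line number
    assert isinstance(bp_line, int)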

View file

@ -22,7 +22,12 @@ from ptvsd.common import fmt, log, messaging
name = fmt("backchannel-{0}", debug_me.session_id)
port = int(os.getenv('PTVSD_BACKCHANNEL_PORT', 0))
port = os.getenv("PTVSD_BACKCHANNEL_PORT")
if port is not None:
port = int(port)
# Remove it, so that child processes don't try to use the same backchannel.
del os.environ["PTVSD_BACKCHANNEL_PORT"]
if port:
log.info('Connecting {0} to port {1}...', name, port)
@ -32,9 +37,6 @@ if port:
_socket.connect(('localhost', port))
_stream = messaging.JsonIOStream.from_socket(_socket, name='backchannel')
receive = _stream.read_json
send = _stream.write_json
@atexit.register
def _atexit_handler():
log.info('Shutting down {0}...', name)
@ -47,3 +49,22 @@ if port:
_socket.close()
except Exception:
pass
else:
class _stream:
def _error(*_):
raise AssertionError("Backchannel is not set up for this process")
read_json = write_json = _error
def send(value):
_stream.write_json(value)
def receive():
return _stream.read_json()
def wait_for(value):
assert receive() == value
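Debuggee-side sketch of the helpers above, assuming the test harness has set PTVSD_BACKCHANNEL_PORT; the payload values are made up:

from debug_me import backchannel

backchannel.send({"status": "ready"})   # the test reads this via its backchannel.receive()
backchannel.wait_for("continue")        # blocks until the test sends exactly "continue"
value = backchannel.receive()           # arbitrary JSON values round-trip both ways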

View file

@ -127,6 +127,22 @@ def test_list():
assert [1, 2, 3] == [1, some.thing, 3]
assert [1, 2, 3, 4] != [1, some.thing, 4]
assert [1, 2, 3, 4] == some.list.containing(1)
assert [1, 2, 3, 4] == some.list.containing(2)
assert [1, 2, 3, 4] == some.list.containing(3)
assert [1, 2, 3, 4] == some.list.containing(4)
assert [1, 2, 3, 4] == some.list.containing(1, 2)
assert [1, 2, 3, 4] == some.list.containing(2, 3)
assert [1, 2, 3, 4] == some.list.containing(3, 4)
assert [1, 2, 3, 4] == some.list.containing(1, 2, 3)
assert [1, 2, 3, 4] == some.list.containing(2, 3, 4)
assert [1, 2, 3, 4] == some.list.containing(1, 2, 3, 4)
assert [1, 2, 3, 4] != some.list.containing(5)
assert [1, 2, 3, 4] != some.list.containing(1, 3)
assert [1, 2, 3, 4] != some.list.containing(1, 2, 4)
assert [1, 2, 3, 4] != some.list.containing(2, 3, 5)
def test_dict():
pattern = {'a': some.thing, 'b': 2}