From f391eb44907034e2e40bbc3e95d2ec3f0eb93308 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Thu, 21 Dec 2017 12:33:47 -0700 Subject: [PATCH 01/32] Add the debugger_protocol package (and schema subpackage). --- debugger_protocol/__init__.py | 0 debugger_protocol/schema/__init__.py | 8 ++++++++ 2 files changed, 8 insertions(+) create mode 100644 debugger_protocol/__init__.py create mode 100644 debugger_protocol/schema/__init__.py diff --git a/debugger_protocol/__init__.py b/debugger_protocol/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/debugger_protocol/schema/__init__.py b/debugger_protocol/schema/__init__.py new file mode 100644 index 00000000..04621791 --- /dev/null +++ b/debugger_protocol/schema/__init__.py @@ -0,0 +1,8 @@ + +import os.path + + +DATA_DIR = os.path.dirname(__file__) + +UPSTREAM = 'https://raw.githubusercontent.com/Microsoft/vscode-debugadapter-node/master/debugProtocol.json' +VENDORED = os.path.join(DATA_DIR, 'debugProtocol.json') From 33e0dc5b57662dc4a452629ed21c41af546e7675 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Thu, 21 Dec 2017 12:52:25 -0700 Subject: [PATCH 02/32] Stub out the schema script. --- debugger_protocol/schema/__main__.py | 53 ++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) create mode 100644 debugger_protocol/schema/__main__.py diff --git a/debugger_protocol/schema/__main__.py b/debugger_protocol/schema/__main__.py new file mode 100644 index 00000000..9a182c21 --- /dev/null +++ b/debugger_protocol/schema/__main__.py @@ -0,0 +1,53 @@ + +import argparse +import sys + +from . import UPSTREAM, VENDORED + + +COMMANDS = {} + +def as_command(name): + def decorator(f): + COMMANDS[name] = f + return f + return decorator + + +############################# +# the script + +def parse_args(argv=sys.argv[1:], prog=None): + if prog is None: + if __name__ == '__main__': + module = __spec__.name + pkg, _, mod = module.rpartition('.') + if not pkg: + module = mod + elif mod == '__main__': + module = pkg + prog = 'python3 -m {}'.format(module) + else: + prog = sys.argv[0] + + parser = argparse.ArgumentParser( + prog=prog, + description='Manage the vendored VSC debugger protocol schema.', + ) + subs = parser.add_subparsers(dest='command') + + args = parser.parse_args(argv) + if args.command is None: + parser.print_help() + parser.exit() + return args + + +def main(command, **kwargs): + handle_command = COMMANDS[command] + return handle_command(**kwargs) + + +if __name__ == '__main__': + args = parse_args() + main(**(vars(args))) From 17847eb54e1de66612a28ea624bdd139e96ddaf4 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Tue, 9 Jan 2018 23:41:23 +0000 Subject: [PATCH 03/32] Add upstream.Metadata. 
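Metadata is a namedtuple that records where the vendored schema file came
from (upstream URL, revision, checksum, download date) and round-trips the
plain-text format used for the metadata file.  A minimal sketch of the
intended usage, using the placeholder values from the new tests:

    from datetime import datetime
    from debugger_protocol.schema.upstream import Metadata

    text = ('upstream: https://x.y.z/schema.json\n'
            'revision: abcdef0123456789\n'
            'checksum: deadbeefdeadbeefdeadbeefdeadbeef\n'
            'date: 2018-01-09 13:10:59 (UTC)\n')

    meta = Metadata.parse(text)   # '#' comments and blank lines are skipped
    assert meta.date == datetime(2018, 1, 9, 13, 10, 59)
    assert meta.format() == text  # parse()/format() round-trip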
--- debugger_protocol/schema/__init__.py | 3 +- debugger_protocol/schema/upstream.py | 79 ++++++++++ tests/debugger_protocol/__init__.py | 0 tests/debugger_protocol/schema/__init__.py | 0 .../debugger_protocol/schema/test_upstream.py | 148 ++++++++++++++++++ 5 files changed, 229 insertions(+), 1 deletion(-) create mode 100644 debugger_protocol/schema/upstream.py create mode 100644 tests/debugger_protocol/__init__.py create mode 100644 tests/debugger_protocol/schema/__init__.py create mode 100644 tests/debugger_protocol/schema/test_upstream.py diff --git a/debugger_protocol/schema/__init__.py b/debugger_protocol/schema/__init__.py index 04621791..2c1c15c7 100644 --- a/debugger_protocol/schema/__init__.py +++ b/debugger_protocol/schema/__init__.py @@ -4,5 +4,6 @@ import os.path DATA_DIR = os.path.dirname(__file__) -UPSTREAM = 'https://raw.githubusercontent.com/Microsoft/vscode-debugadapter-node/master/debugProtocol.json' +UPSTREAM = 'https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json' # noqa VENDORED = os.path.join(DATA_DIR, 'debugProtocol.json') +METADATA = os.path.join(DATA_DIR, 'UPSTREAM') diff --git a/debugger_protocol/schema/upstream.py b/debugger_protocol/schema/upstream.py new file mode 100644 index 00000000..ced76dbe --- /dev/null +++ b/debugger_protocol/schema/upstream.py @@ -0,0 +1,79 @@ +from collections import namedtuple +from datetime import datetime +from textwrap import dedent + +from . import UPSTREAM + + +class Metadata(namedtuple('Metadata', 'upstream revision checksum date')): + """Info about the local copy of the upstream schema file.""" + + TIMESTAMP = '%Y-%m-%d %H:%M:%S (UTC)' + + FORMAT = dedent("""\ + upstream: {} + revision: {} + checksum: {} + date: {:%s} + """) % TIMESTAMP + + @classmethod + def parse(cls, data): + """Return an instance based on the given metadata string.""" + lines = data.splitlines() + + kwargs = {} + for line in lines: + line = line.strip() + if line.startswith('#'): + continue + if not line: + continue + field, _, value = line.partition(':') + kwargs[field] = value.strip() + self = cls(**kwargs) + return self + + def __new__(cls, upstream, revision, checksum, date): + # coercion + upstream = str(upstream) if upstream else None + revision = str(revision) if revision else None + checksum = str(checksum) if checksum else None + if not date: + date = None + elif isinstance(date, str): + date = datetime.strptime(date, cls.TIMESTAMP) + elif date.tzinfo is not None: + date -= date.utcoffset() + + self = super().__new__(cls, upstream, revision, checksum, date) + return self + + def __init__(self, *args, **kwargs): + # validation + + if not self.upstream: + raise ValueError('missing upstream URL') + # TODO ensure upstream is URL? + + if not self.revision: + raise ValueError('missing upstream revision') + # TODO ensure revision is a hash? + + if not self.checksum: + raise ValueError('missing checksum') + # TODO ensure checksum is a MD5 hash? 
+ + if not self.date: + raise ValueError('missing date') + + @property + def url(self): + if self.upstream == UPSTREAM: + return self.upstream.replace('master', self.revision) + else: + raise NotImplementedError + + def format(self): + """Return a string containing the formatted metadata.""" + return self.FORMAT.format(*self) diff --git a/tests/debugger_protocol/__init__.py b/tests/debugger_protocol/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/debugger_protocol/schema/__init__.py b/tests/debugger_protocol/schema/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/debugger_protocol/schema/test_upstream.py b/tests/debugger_protocol/schema/test_upstream.py new file mode 100644 index 00000000..0c6ac15d --- /dev/null +++ b/tests/debugger_protocol/schema/test_upstream.py @@ -0,0 +1,148 @@ +from datetime import datetime +from textwrap import dedent +import unittest + +from debugger_protocol.schema import UPSTREAM +from debugger_protocol.schema.upstream import Metadata + + +class Stringlike: + + def __init__(self, value): + self.value = value + + def __str__(self): + return self.value + + +class Hash(Stringlike): + pass + + +class MetadataTests(unittest.TestCase): + + def test_parse_minimal(self): + expected = Metadata('https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + ) + meta = Metadata.parse(dedent(""" + upstream: https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: deadbeefdeadbeefdeadbeefdeadbeef + date: 2018-01-09 13:10:59 (UTC) + """)) + + self.assertEqual(meta, expected) + + def test_parse_with_whitespace_and_comments(self): + expected = Metadata('https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + ) + meta = Metadata.parse(dedent(""" + + # generated by x.y.z + upstream: https://x.y.z/schema.json + + revision: abcdef0123456789 + checksum: deadbeefdeadbeefdeadbeefdeadbeef + date: 2018-01-09 13:10:59 (UTC) + + # done! 
+ + """)) # noqa + + self.assertEqual(meta, expected) + + def test_parse_roundtrip_from_object(self): + orig = Metadata('https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + ) + meta = Metadata.parse( + orig.format()) + + self.assertEqual(meta, orig) + + def test_parse_roundtrip_from_string(self): + orig = dedent("""\ + upstream: https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: deadbeefdeadbeefdeadbeefdeadbeef + date: 2018-01-09 13:10:59 (UTC) + """).format(UPSTREAM) + data = (Metadata.parse(orig) + ).format() + + self.assertEqual(data, orig) + + def test_coercion_noop(self): + meta = Metadata('https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + ) + + self.assertEqual(meta, ( + 'https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + )) + + def test_coercion_change_all(self): + meta = Metadata(Stringlike('https://x.y.z/schema.json'), + Hash('abcdef0123456789'), + Hash('deadbeefdeadbeefdeadbeefdeadbeef'), + '2018-01-09 13:10:59 (UTC)', + ) + + self.assertEqual(meta, ( + 'https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + )) + + def test_validation_fail(self): + baseargs = [ + 'https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + ] + for i in range(len(baseargs)): + with self.subTest(baseargs[i]): + args = list(baseargs) + args[i] = '' + with self.assertRaises(ValueError): + Metadata(*args) + + def test_url(self): + meta = Metadata(UPSTREAM, + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + ) + url = meta.url + + self.assertEqual(url, 'https://github.com/Microsoft/vscode-debugadapter-node/raw/abcdef0123456789/debugProtocol.json') # noqa + + def test_format(self): + meta = Metadata('https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + ) + formatted = meta.format() + + self.assertEqual(formatted, dedent("""\ + upstream: https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: deadbeefdeadbeefdeadbeefdeadbeef + date: 2018-01-09 13:10:59 (UTC) + """)) From 7fb4e339b6dfc2f20b1874fc7bc0a520cd74000e Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Wed, 10 Jan 2018 01:01:53 +0000 Subject: [PATCH 04/32] Only test debugger_protocol under Python 3. --- tests/debugger_protocol/__init__.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/debugger_protocol/__init__.py b/tests/debugger_protocol/__init__.py index e69de29b..bd305792 100644 --- a/tests/debugger_protocol/__init__.py +++ b/tests/debugger_protocol/__init__.py @@ -0,0 +1,6 @@ +import sys +import unittest + + +if sys.version_info[0] == 2: + raise unittest.SkipTest('not tested under Python 2') From ff090e1ad784a65fb8c0f775821b28949406910c Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Wed, 10 Jan 2018 01:08:07 +0000 Subject: [PATCH 05/32] Add some schema utils. 
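The helpers cover the pieces needed to manage the vendored schema file:
opening a URL, resolving a GitHub "raw" URL to the concrete commit hash it
currently points at, and computing an MD5 checksum of the downloaded bytes.
A rough sketch of how they fit together (the URL and hashes are the
placeholder values from the new tests, and the network call is stubbed out
via the _open hook rather than hitting GitHub):

    import io
    from debugger_protocol.schema._util import get_checksum, get_revision

    # MD5 of the raw bytes of a downloaded file.
    assert get_checksum(b'spam') == 'e09f6a7593f8ae3994ea57e1117f67ec'

    # Resolve .../raw/master/... to a full commit hash, with the GitHub
    # API response faked instead of performing a real request.
    reply = io.BytesIO(b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}')
    rev = get_revision('https://github.com/x/y/raw/master/z',
                       _open=lambda url: reply)
    assert rev == 'fc2395ca3564fb2afded8d90ddbe38dad1bf86f1'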
---
 debugger_protocol/schema/__init__.py        |  1 -
 debugger_protocol/schema/_util.py           | 50 +++++++++++++++++++++
 tests/debugger_protocol/schema/test_util.py | 29 ++++++++++++
 3 files changed, 79 insertions(+), 1 deletion(-)
 create mode 100644 debugger_protocol/schema/_util.py
 create mode 100644 tests/debugger_protocol/schema/test_util.py

diff --git a/debugger_protocol/schema/__init__.py b/debugger_protocol/schema/__init__.py
index 2c1c15c7..f8f9e890 100644
--- a/debugger_protocol/schema/__init__.py
+++ b/debugger_protocol/schema/__init__.py
@@ -1,4 +1,3 @@
-
 import os.path
 
 
diff --git a/debugger_protocol/schema/_util.py b/debugger_protocol/schema/_util.py
new file mode 100644
index 00000000..1c7b4bb4
--- /dev/null
+++ b/debugger_protocol/schema/_util.py
@@ -0,0 +1,50 @@
+import hashlib
+import json
+import re
+import urllib.request
+
+
+def open_url(url):
+    """Return a file-like object for (binary) reading the given URL."""
+    return urllib.request.urlopen(url)
+
+
+def get_revision(url, *, _open=open_url):
+    """Return the revision corresponding to the given URL."""
+    if url.startswith('https://github.com/'):
+        return get_github_revision(url, _open=_open)
+    else:
+        raise NotImplementedError
+
+
+def get_checksum(data):
+    """Return the MD5 hash for the given data."""
+    m = hashlib.md5()
+    m.update(data)
+    return m.hexdigest()
+
+
+##################################
+# github
+
+GH_RESOURCE_RE = re.compile(r'^https://github.com'
+                            r'/(?P<org>[^/]*)'
+                            r'/(?P<repo>[^/]*)'
+                            r'/(?P<kind>[^/]*)'
+                            r'/(?P<ref>[^/]*)'
+                            r'/(?P<path>.*)$')
+
+
+def get_github_revision(url, *, _open=open_url):
+    """Return the full commit hash corresponding to the given URL."""
+    m = GH_RESOURCE_RE.match(url)
+    if not m:
+        raise ValueError('invalid GitHub resource URL: {!r}'.format(url))
+    org, repo, _, ref, _ = m.groups()
+
+    revurl = ('https://api.github.com/repos/{}/{}/commits/{}'
+              ).format(org, repo, ref)
+    with _open(revurl) as revinfo:
+        raw = revinfo.read()
+    data = json.loads(raw.decode())
+    return data['sha']
diff --git a/tests/debugger_protocol/schema/test_util.py b/tests/debugger_protocol/schema/test_util.py
new file mode 100644
index 00000000..d24a265e
--- /dev/null
+++ b/tests/debugger_protocol/schema/test_util.py
@@ -0,0 +1,29 @@
+import io
+import unittest
+
+from debugger_protocol.schema._util import get_revision, get_checksum
+
+
+class GetRevisionTests(unittest.TestCase):
+
+    def test_github(self):
+        buf = io.BytesIO(
+            b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}')
+        revision = get_revision('https://github.com/x/y/raw/master/z',
+                                _open=lambda _: buf)
+
+        self.assertEqual(revision, 'fc2395ca3564fb2afded8d90ddbe38dad1bf86f1')
+
+
+class GetChecksumTests(unittest.TestCase):
+
+    def test_checksums(self):
+        checksums = {
+            b'': 'd41d8cd98f00b204e9800998ecf8427e',
+            b'spam': 'e09f6a7593f8ae3994ea57e1117f67ec',
+        }
+        for data, expected in checksums.items():
+            with self.subTest(data):
+                checksum = get_checksum(data)
+
+                self.assertEqual(checksum, expected)

From 7fb4e339b6dfc2f20b1874fc7bc0a520cd74000e Mon Sep 17 00:00:00 2001
From: Eric Snow
Date: Wed, 10 Jan 2018 01:18:15 +0000
Subject: [PATCH 06/32] Add a command to download the upstream schema file.
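With this in place the vendored debugProtocol.json and the UPSTREAM metadata
file next to it can be refreshed from the command line.  A rough usage sketch
(assuming the package is importable):

    $ python3 -m debugger_protocol.schema download
    $ python3 -m debugger_protocol.schema download --source URL --target PATH

The command copies the schema file from --source (default: the UPSTREAM URL)
to --target (default: the VENDORED path) via upstream.download(), then writes
the returned metadata (upstream URL, revision, checksum, download date) to the
UPSTREAM file alongside the target, as shown below.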
--- debugger_protocol/schema/UPSTREAM | 4 + debugger_protocol/schema/__main__.py | 26 +- debugger_protocol/schema/debugProtocol.json | 2818 +++++++++++++++++ debugger_protocol/schema/upstream.py | 18 + .../debugger_protocol/schema/test_upstream.py | 28 +- 5 files changed, 2891 insertions(+), 3 deletions(-) create mode 100644 debugger_protocol/schema/UPSTREAM create mode 100644 debugger_protocol/schema/debugProtocol.json diff --git a/debugger_protocol/schema/UPSTREAM b/debugger_protocol/schema/UPSTREAM new file mode 100644 index 00000000..f6e399d4 --- /dev/null +++ b/debugger_protocol/schema/UPSTREAM @@ -0,0 +1,4 @@ +upstream: https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json +revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 +checksum: 24a370d038f7875f4db2631d5238fd17 +date: 2018-01-10 00:47:10 (UTC) diff --git a/debugger_protocol/schema/__main__.py b/debugger_protocol/schema/__main__.py index 9a182c21..5c5847ee 100644 --- a/debugger_protocol/schema/__main__.py +++ b/debugger_protocol/schema/__main__.py @@ -1,12 +1,15 @@ - import argparse +import os.path import sys -from . import UPSTREAM, VENDORED +from . import (UPSTREAM, VENDORED, METADATA, + upstream) +from ._util import open_url COMMANDS = {} + def as_command(name): def decorator(f): COMMANDS[name] = f @@ -14,6 +17,21 @@ def as_command(name): return decorator +@as_command('download') +def handle_download(source=UPSTREAM, target=VENDORED): + # Download the schema file. + with open_url(source) as infile: + with open(target, 'wb') as outfile: + meta = upstream.download(source, infile, outfile) + + # Save the metadata. + filename = os.path.join(os.path.dirname(target), + os.path.basename(METADATA)) + with open(filename, 'w') as metafile: + metafile.write( + meta.format()) + + ############################# # the script @@ -36,6 +54,10 @@ def parse_args(argv=sys.argv[1:], prog=None): ) subs = parser.add_subparsers(dest='command') + download = subs.add_parser('download') + download.add_argument('--source', default=UPSTREAM) + download.add_argument('--target', default=VENDORED) + args = parser.parse_args(argv) if args.command is None: parser.print_help() diff --git a/debugger_protocol/schema/debugProtocol.json b/debugger_protocol/schema/debugProtocol.json new file mode 100644 index 00000000..6cf94354 --- /dev/null +++ b/debugger_protocol/schema/debugProtocol.json @@ -0,0 +1,2818 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "VS Code Debug Protocol", + "description": "A json schema for the VS Code Debug Protocol", + "type": "object", + + "definitions": { + + "ProtocolMessage": { + "type": "object", + "description": "Base class of requests, responses, and events.", + "properties": { + "seq": { + "type": "integer", + "description": "Sequence number." + }, + "type": { + "type": "string", + "description": "Message type.", + "_enum": [ "request", "response", "event" ] + } + }, + "required": [ "seq", "type" ] + }, + + "Request": { + "allOf": [ { "$ref": "#/definitions/ProtocolMessage" }, { + "type": "object", + "description": "A client or server-initiated request.", + "properties": { + "type": { + "type": "string", + "enum": [ "request" ] + }, + "command": { + "type": "string", + "description": "The command to execute." + }, + "arguments": { + "type": [ "array", "boolean", "integer", "null", "number" , "object", "string" ], + "description": "Object containing arguments for the command." 
+ } + }, + "required": [ "type", "command" ] + }] + }, + + "Event": { + "allOf": [ { "$ref": "#/definitions/ProtocolMessage" }, { + "type": "object", + "description": "Server-initiated event.", + "properties": { + "type": { + "type": "string", + "enum": [ "event" ] + }, + "event": { + "type": "string", + "description": "Type of event." + }, + "body": { + "type": [ "array", "boolean", "integer", "null", "number" , "object", "string" ], + "description": "Event-specific information." + } + }, + "required": [ "type", "event" ] + }] + }, + + "Response": { + "allOf": [ { "$ref": "#/definitions/ProtocolMessage" }, { + "type": "object", + "description": "Response to a request.", + "properties": { + "type": { + "type": "string", + "enum": [ "response" ] + }, + "request_seq": { + "type": "integer", + "description": "Sequence number of the corresponding request." + }, + "success": { + "type": "boolean", + "description": "Outcome of the request." + }, + "command": { + "type": "string", + "description": "The command requested." + }, + "message": { + "type": "string", + "description": "Contains error message if success == false." + }, + "body": { + "type": [ "array", "boolean", "integer", "null", "number" , "object", "string" ], + "description": "Contains request result if success is true and optional error details if success is false." + } + }, + "required": [ "type", "request_seq", "success", "command" ] + }] + }, + + "InitializedEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "Event message for 'initialized' event type.\nThis event indicates that the debug adapter is ready to accept configuration requests (e.g. SetBreakpointsRequest, SetExceptionBreakpointsRequest).\nA debug adapter is expected to send this event when it is ready to accept configuration requests (but not before the InitializeRequest has finished).\nThe sequence of events/requests is as follows:\n- adapters sends InitializedEvent (after the InitializeRequest has returned)\n- frontend sends zero or more SetBreakpointsRequest\n- frontend sends one SetFunctionBreakpointsRequest\n- frontend sends a SetExceptionBreakpointsRequest if one or more exceptionBreakpointFilters have been defined (or if supportsConfigurationDoneRequest is not defined or false)\n- frontend sends other future configuration requests\n- frontend sends one ConfigurationDoneRequest to indicate the end of the configuration", + "properties": { + "event": { + "type": "string", + "enum": [ "initialized" ] + } + }, + "required": [ "event" ] + }] + }, + + "StoppedEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "Event message for 'stopped' event type.\nThe event indicates that the execution of the debuggee has stopped due to some condition.\nThis can be caused by a break point previously set, a stepping action has completed, by executing a debugger statement etc.", + "properties": { + "event": { + "type": "string", + "enum": [ "stopped" ] + }, + "body": { + "type": "object", + "properties": { + "reason": { + "type": "string", + "description": "The reason for the event.\nFor backward compatibility this string is shown in the UI if the 'description' attribute is missing (but it must not be translated).", + "_enum": [ "step", "breakpoint", "exception", "pause", "entry" ] + }, + "description": { + "type": "string", + "description": "The full reason for the event, e.g. 'Paused on exception'. This string is shown in the UI as is." 
+ }, + "threadId": { + "type": "integer", + "description": "The thread which was stopped." + }, + "text": { + "type": "string", + "description": "Additional information. E.g. if reason is 'exception', text contains the exception name. This string is shown in the UI." + }, + "allThreadsStopped": { + "type": "boolean", + "description": "If allThreadsStopped is true, a debug adapter can announce that all threads have stopped.\n* The client should use this information to enable that all threads can be expanded to access their stacktraces.\n* If the attribute is missing or false, only the thread with the given threadId can be expanded." + } + }, + "required": [ "reason" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "ContinuedEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "Event message for 'continued' event type.\nThe event indicates that the execution of the debuggee has continued.\nPlease note: a debug adapter is not expected to send this event in response to a request that implies that execution continues, e.g. 'launch' or 'continue'.\nIt is only necessary to send a ContinuedEvent if there was no previous request that implied this.", + "properties": { + "event": { + "type": "string", + "enum": [ "continued" ] + }, + "body": { + "type": "object", + "properties": { + "threadId": { + "type": "integer", + "description": "The thread which was continued." + }, + "allThreadsContinued": { + "type": "boolean", + "description": "If allThreadsContinued is true, a debug adapter can announce that all threads have continued." + } + }, + "required": [ "threadId" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "ExitedEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "Event message for 'exited' event type.\nThe event indicates that the debuggee has exited.", + "properties": { + "event": { + "type": "string", + "enum": [ "exited" ] + }, + "body": { + "type": "object", + "properties": { + "exitCode": { + "type": "integer", + "description": "The exit code returned from the debuggee." + } + }, + "required": [ "exitCode" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "TerminatedEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "Event message for 'terminated' event types.\nThe event indicates that debugging of the debuggee has terminated.", + "properties": { + "event": { + "type": "string", + "enum": [ "terminated" ] + }, + "body": { + "type": "object", + "properties": { + "restart": { + "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], + "description": "A debug adapter may set 'restart' to true (or to an arbitrary object) to request that the front end restarts the session.\nThe value is not interpreted by the client and passed unmodified as an attribute '__restart' to the launchRequest." + } + } + } + }, + "required": [ "event" ] + }] + }, + + "ThreadEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "Event message for 'thread' event type.\nThe event indicates that a thread has started or exited.", + "properties": { + "event": { + "type": "string", + "enum": [ "thread" ] + }, + "body": { + "type": "object", + "properties": { + "reason": { + "type": "string", + "description": "The reason for the event.", + "_enum": [ "started", "exited" ] + }, + "threadId": { + "type": "integer", + "description": "The identifier of the thread." 
+ } + }, + "required": ["reason", "threadId"] + } + }, + "required": [ "event", "body" ] + }] + }, + + "OutputEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "Event message for 'output' event type.\nThe event indicates that the target has produced some output.", + "properties": { + "event": { + "type": "string", + "enum": [ "output" ] + }, + "body": { + "type": "object", + "properties": { + "category": { + "type": "string", + "description": "The output category. If not specified, 'console' is assumed.", + "_enum": [ "console", "stdout", "stderr", "telemetry" ] + }, + "output": { + "type": "string", + "description": "The output to report." + }, + "variablesReference": { + "type": "number", + "description": "If an attribute 'variablesReference' exists and its value is > 0, the output contains objects which can be retrieved by passing variablesReference to the VariablesRequest." + }, + "source": { + "$ref": "#/definitions/Source", + "description": "An optional source location where the output was produced." + }, + "line": { + "type": "integer", + "description": "An optional source location line where the output was produced." + }, + "column": { + "type": "integer", + "description": "An optional source location column where the output was produced." + }, + "data": { + "type": [ "array", "boolean", "integer", "null", "number" , "object", "string" ], + "description": "Optional data to report. For the 'telemetry' category the data will be sent to telemetry, for the other categories the data is shown in JSON format." + } + }, + "required": ["output"] + } + }, + "required": [ "event", "body" ] + }] + }, + + "BreakpointEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "Event message for 'breakpoint' event type.\nThe event indicates that some information about a breakpoint has changed.", + "properties": { + "event": { + "type": "string", + "enum": [ "breakpoint" ] + }, + "body": { + "type": "object", + "properties": { + "reason": { + "type": "string", + "description": "The reason for the event.", + "_enum": [ "changed", "new", "removed" ] + }, + "breakpoint": { + "$ref": "#/definitions/Breakpoint", + "description": "The breakpoint." + } + }, + "required": [ "reason", "breakpoint" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "ModuleEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "Event message for 'module' event type.\nThe event indicates that some information about a module has changed.", + "properties": { + "event": { + "type": "string", + "enum": [ "module" ] + }, + "body": { + "type": "object", + "properties": { + "reason": { + "type": "string", + "description": "The reason for the event.", + "enum": [ "new", "changed", "removed" ] + }, + "module": { + "$ref": "#/definitions/Module", + "description": "The new, changed, or removed module. In case of 'removed' only the module id is used." 
+ } + }, + "required": [ "reason", "module" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "LoadedSourceEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "Event message for 'loadedSource' event type.\nThe event indicates that some source has been added, changed, or removed from the set of all loaded sources.", + "properties": { + "event": { + "type": "string", + "enum": [ "loadedSource" ] + }, + "body": { + "type": "object", + "properties": { + "reason": { + "type": "string", + "description": "The reason for the event.", + "enum": [ "new", "changed", "removed" ] + }, + "source": { + "$ref": "#/definitions/Source", + "description": "The new, changed, or removed source." + } + }, + "required": [ "reason", "source" ] + } + }, + "required": [ "event", "body" ] + }] + }, + + "ProcessEvent": { + "allOf": [ + { "$ref": "#/definitions/Event" }, + { + "type": "object", + "description": "Event message for 'process' event type.\nThe event indicates that the debugger has begun debugging a new process. Either one that it has launched, or one that it has attached to.", + "properties": { + "event": { + "type": "string", + "enum": [ "process" ] + }, + "body": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The logical name of the process. This is usually the full path to process's executable file. Example: /home/example/myproj/program.js." + }, + "systemProcessId": { + "type": "integer", + "description": "The system process id of the debugged process. This property will be missing for non-system processes." + }, + "isLocalProcess": { + "type": "boolean", + "description": "If true, the process is running on the same computer as the debug adapter." + }, + "startMethod": { + "type": "string", + "enum": [ "launch", "attach", "attachForSuspendedLaunch" ], + "description": "Describes how the debug engine started debugging this process.", + "enumDescriptions": [ + "Process was launched under the debugger.", + "Debugger attached to an existing process.", + "A project launcher component has launched a new process in a suspended state and then asked the debugger to attach." + ] + } + }, + "required": [ "name" ] + } + }, + "required": [ "event", "body" ] + } + ] + }, + + "RunInTerminalRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "runInTerminal request; value of command field is 'runInTerminal'.\nWith this request a debug adapter can run a command in a terminal.", + "properties": { + "command": { + "type": "string", + "enum": [ "runInTerminal" ] + }, + "arguments": { + "$ref": "#/definitions/RunInTerminalRequestArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "RunInTerminalRequestArguments": { + "type": "object", + "description": "Arguments for 'runInTerminal' request.", + "properties": { + "kind": { + "type": "string", + "enum": [ "integrated", "external" ], + "description": "What kind of terminal to launch." + }, + "title": { + "type": "string", + "description": "Optional title of the terminal." + }, + "cwd": { + "type": "string", + "description": "Working directory of the command." + }, + "args": { + "type": "array", + "items": { + "type": "string" + }, + "description": "List of arguments. The first argument is the command to run." 
+ }, + "env": { + "type": "object", + "description": "Environment key-value pairs that are added to or removed from the default environment.", + "additionalProperties": { + "type": [ "string", "null" ], + "description": "Proper values must be strings. A value of 'null' removes the variable from the environment." + } + } + }, + "required": [ "args", "cwd" ] + }, + "RunInTerminalResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to Initialize request.", + "properties": { + "body": { + "type": "object", + "properties": { + "processId": { + "type": "number", + "description": "The process ID." + } + } + } + }, + "required": [ "body" ] + }] + }, + + "ErrorResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "On error that is whenever 'success' is false, the body can provide more details.", + "properties": { + "body": { + "type": "object", + "properties": { + "error": { + "$ref": "#/definitions/Message", + "description": "An optional, structured error message." + } + } + } + }, + "required": [ "body" ] + }] + }, + + "InitializeRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Initialize request; value of command field is 'initialize'.", + "properties": { + "command": { + "type": "string", + "enum": [ "initialize" ] + }, + "arguments": { + "$ref": "#/definitions/InitializeRequestArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "InitializeRequestArguments": { + "type": "object", + "description": "Arguments for 'initialize' request.", + "properties": { + "clientID": { + "type": "string", + "description": "The ID of the (frontend) client using this adapter." + }, + "adapterID": { + "type": "string", + "description": "The ID of the debug adapter." + }, + "locale": { + "type": "string", + "description": "The ISO-639 locale of the (frontend) client using this adapter, e.g. en-US or de-CH." + }, + "linesStartAt1": { + "type": "boolean", + "description": "If true all line numbers are 1-based (default)." + }, + "columnsStartAt1": { + "type": "boolean", + "description": "If true all column numbers are 1-based (default)." + }, + "pathFormat": { + "type": "string", + "_enum": [ "path", "uri" ], + "description": "Determines in what format paths are specified. The default is 'path', which is the native format." + }, + "supportsVariableType": { + "type": "boolean", + "description": "Client supports the optional type attribute for variables." + }, + "supportsVariablePaging": { + "type": "boolean", + "description": "Client supports the paging of variables." + }, + "supportsRunInTerminalRequest": { + "type": "boolean", + "description": "Client supports the runInTerminal request." + } + }, + "required": [ "adapterID" ] + }, + "InitializeResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'initialize' request.", + "properties": { + "body": { + "$ref": "#/definitions/Capabilities", + "description": "The capabilities of this debug adapter." 
+ } + } + }] + }, + + "ConfigurationDoneRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "ConfigurationDone request; value of command field is 'configurationDone'.\nThe client of the debug protocol must send this request at the end of the sequence of configuration requests (which was started by the InitializedEvent).", + "properties": { + "command": { + "type": "string", + "enum": [ "configurationDone" ] + }, + "arguments": { + "$ref": "#/definitions/ConfigurationDoneArguments" + } + }, + "required": [ "command" ] + }] + }, + "ConfigurationDoneArguments": { + "type": "object", + "description": "Arguments for 'configurationDone' request.\nThe configurationDone request has no standardized attributes." + }, + "ConfigurationDoneResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'configurationDone' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "LaunchRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Launch request; value of command field is 'launch'.", + "properties": { + "command": { + "type": "string", + "enum": [ "launch" ] + }, + "arguments": { + "$ref": "#/definitions/LaunchRequestArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "LaunchRequestArguments": { + "type": "object", + "description": "Arguments for 'launch' request.", + "properties": { + "noDebug": { + "type": "boolean", + "description": "If noDebug is true the launch request should launch the program without enabling debugging." + } + } + }, + "LaunchResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'launch' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "AttachRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Attach request; value of command field is 'attach'.", + "properties": { + "command": { + "type": "string", + "enum": [ "attach" ] + }, + "arguments": { + "$ref": "#/definitions/AttachRequestArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "AttachRequestArguments": { + "type": "object", + "description": "Arguments for 'attach' request.\nThe attach request has no standardized attributes." + }, + "AttachResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'attach' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "RestartRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Restart request; value of command field is 'restart'.\nRestarts a debug session. If the capability 'supportsRestartRequest' is missing or has the value false,\nthe client will implement 'restart' by terminating the debug adapter first and then launching it anew.\nA debug adapter can override this default behaviour by implementing a restart request\nand setting the capability 'supportsRestartRequest' to true.", + "properties": { + "command": { + "type": "string", + "enum": [ "restart" ] + }, + "arguments": { + "$ref": "#/definitions/RestartArguments" + } + }, + "required": [ "command" ] + }] + }, + "RestartArguments": { + "type": "object", + "description": "Arguments for 'restart' request.\nThe restart request has no standardized attributes." 
+ }, + "RestartResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'restart' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "DisconnectRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Disconnect request; value of command field is 'disconnect'.", + "properties": { + "command": { + "type": "string", + "enum": [ "disconnect" ] + }, + "arguments": { + "$ref": "#/definitions/DisconnectArguments" + } + }, + "required": [ "command" ] + }] + }, + "DisconnectArguments": { + "type": "object", + "description": "Arguments for 'disconnect' request.", + "properties": { + "terminateDebuggee": { + "type": "boolean", + "description": "Indicates whether the debuggee should be terminated when the debugger is disconnected.\nIf unspecified, the debug adapter is free to do whatever it thinks is best.\nA client can only rely on this attribute being properly honored if a debug adapter returns true for the 'supportTerminateDebuggee' capability." + } + } + }, + "DisconnectResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'disconnect' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "SetBreakpointsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "SetBreakpoints request; value of command field is 'setBreakpoints'.\nSets multiple breakpoints for a single source and clears all previous breakpoints in that source.\nTo clear all breakpoint for a source, specify an empty array.\nWhen a breakpoint is hit, a StoppedEvent (event type 'breakpoint') is generated.", + "properties": { + "command": { + "type": "string", + "enum": [ "setBreakpoints" ] + }, + "arguments": { + "$ref": "#/definitions/SetBreakpointsArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "SetBreakpointsArguments": { + "type": "object", + "description": "Arguments for 'setBreakpoints' request.", + "properties": { + "source": { + "$ref": "#/definitions/Source", + "description": "The source location of the breakpoints; either source.path or source.reference must be specified." + }, + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/SourceBreakpoint" + }, + "description": "The code locations of the breakpoints." + }, + "lines": { + "type": "array", + "items": { + "type": "integer" + }, + "description": "Deprecated: The code locations of the breakpoints." + }, + "sourceModified": { + "type": "boolean", + "description": "A value of true indicates that the underlying source has been modified which results in new breakpoint locations." + } + }, + "required": [ "source" ] + }, + "SetBreakpointsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'setBreakpoints' request.\nReturned is information about each breakpoint created by this request.\nThis includes the actual code location and whether the breakpoint could be verified.\nThe breakpoints returned are in the same order as the elements of the 'breakpoints'\n(or the deprecated 'lines') in the SetBreakpointsArguments.", + "properties": { + "body": { + "type": "object", + "properties": { + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/Breakpoint" + }, + "description": "Information about the breakpoints. 
The array elements are in the same order as the elements of the 'breakpoints' (or the deprecated 'lines') in the SetBreakpointsArguments." + } + }, + "required": [ "breakpoints" ] + } + }, + "required": [ "body" ] + }] + }, + + "SetFunctionBreakpointsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "SetFunctionBreakpoints request; value of command field is 'setFunctionBreakpoints'.\nSets multiple function breakpoints and clears all previous function breakpoints.\nTo clear all function breakpoint, specify an empty array.\nWhen a function breakpoint is hit, a StoppedEvent (event type 'function breakpoint') is generated.", + "properties": { + "command": { + "type": "string", + "enum": [ "setFunctionBreakpoints" ] + }, + "arguments": { + "$ref": "#/definitions/SetFunctionBreakpointsArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "SetFunctionBreakpointsArguments": { + "type": "object", + "description": "Arguments for 'setFunctionBreakpoints' request.", + "properties": { + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/FunctionBreakpoint" + }, + "description": "The function names of the breakpoints." + } + }, + "required": [ "breakpoints" ] + }, + "SetFunctionBreakpointsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'setFunctionBreakpoints' request.\nReturned is information about each breakpoint created by this request.", + "properties": { + "body": { + "type": "object", + "properties": { + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/Breakpoint" + }, + "description": "Information about the breakpoints. The array elements correspond to the elements of the 'breakpoints' array." + } + }, + "required": [ "breakpoints" ] + } + }, + "required": [ "body" ] + }] + }, + + "SetExceptionBreakpointsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "SetExceptionBreakpoints request; value of command field is 'setExceptionBreakpoints'.\nThe request configures the debuggers response to thrown exceptions. If an exception is configured to break, a StoppedEvent is fired (event type 'exception').", + "properties": { + "command": { + "type": "string", + "enum": [ "setExceptionBreakpoints" ] + }, + "arguments": { + "$ref": "#/definitions/SetExceptionBreakpointsArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "SetExceptionBreakpointsArguments": { + "type": "object", + "description": "Arguments for 'setExceptionBreakpoints' request.", + "properties": { + "filters": { + "type": "array", + "items": { + "type": "string" + }, + "description": "IDs of checked exception options. The set of IDs is returned via the 'exceptionBreakpointFilters' capability." + }, + "exceptionOptions": { + "type": "array", + "items": { + "$ref": "#/definitions/ExceptionOptions" + }, + "description": "Configuration options for selected exceptions." + } + }, + "required": [ "filters" ] + }, + "SetExceptionBreakpointsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'setExceptionBreakpoints' request. This is just an acknowledgement, so no body field is required." 
+ }] + }, + + "ContinueRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Continue request; value of command field is 'continue'.\nThe request starts the debuggee to run again.", + "properties": { + "command": { + "type": "string", + "enum": [ "continue" ] + }, + "arguments": { + "$ref": "#/definitions/ContinueArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "ContinueArguments": { + "type": "object", + "description": "Arguments for 'continue' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Continue execution for the specified thread (if possible). If the backend cannot continue on a single thread but will continue on all threads, it should set the allThreadsContinued attribute in the response to true." + } + }, + "required": [ "threadId" ] + }, + "ContinueResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'continue' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "allThreadsContinued": { + "type": "boolean", + "description": "If true, the continue request has ignored the specified thread and continued all threads instead. If this attribute is missing a value of 'true' is assumed for backward compatibility." + } + } + } + }, + "required": [ "body" ] + }] + }, + + "NextRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Next request; value of command field is 'next'.\nThe request starts the debuggee to run again for one step.\nThe debug adapter first sends the NextResponse and then a StoppedEvent (event type 'step') after the step has completed.", + "properties": { + "command": { + "type": "string", + "enum": [ "next" ] + }, + "arguments": { + "$ref": "#/definitions/NextArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "NextArguments": { + "type": "object", + "description": "Arguments for 'next' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Execute 'next' for this thread." + } + }, + "required": [ "threadId" ] + }, + "NextResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'next' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "StepInRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "StepIn request; value of command field is 'stepIn'.\nThe request starts the debuggee to step into a function/method if possible.\nIf it cannot step into a target, 'stepIn' behaves like 'next'.\nThe debug adapter first sends the StepInResponse and then a StoppedEvent (event type 'step') after the step has completed.\nIf there are multiple function/method calls (or other targets) on the source line,\nthe optional argument 'targetId' can be used to control into which target the 'stepIn' should occur.\nThe list of possible targets for a given source line can be retrieved via the 'stepInTargets' request.", + "properties": { + "command": { + "type": "string", + "enum": [ "stepIn" ] + }, + "arguments": { + "$ref": "#/definitions/StepInArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "StepInArguments": { + "type": "object", + "description": "Arguments for 'stepIn' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Execute 'stepIn' for this thread." 
+ }, + "targetId": { + "type": "integer", + "description": "Optional id of the target to step into." + } + }, + "required": [ "threadId" ] + }, + "StepInResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'stepIn' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "StepOutRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "StepOut request; value of command field is 'stepOut'.\nThe request starts the debuggee to run again for one step.\nThe debug adapter first sends the StepOutResponse and then a StoppedEvent (event type 'step') after the step has completed.", + "properties": { + "command": { + "type": "string", + "enum": [ "stepOut" ] + }, + "arguments": { + "$ref": "#/definitions/StepOutArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "StepOutArguments": { + "type": "object", + "description": "Arguments for 'stepOut' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Execute 'stepOut' for this thread." + } + }, + "required": [ "threadId" ] + }, + "StepOutResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'stepOut' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "StepBackRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "StepBack request; value of command field is 'stepBack'.\nThe request starts the debuggee to run one step backwards.\nThe debug adapter first sends the StepBackResponse and then a StoppedEvent (event type 'step') after the step has completed. Clients should only call this request if the capability supportsStepBack is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "stepBack" ] + }, + "arguments": { + "$ref": "#/definitions/StepBackArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "StepBackArguments": { + "type": "object", + "description": "Arguments for 'stepBack' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Exceute 'stepBack' for this thread." + } + }, + "required": [ "threadId" ] + }, + "StepBackResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'stepBack' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "ReverseContinueRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "ReverseContinue request; value of command field is 'reverseContinue'.\nThe request starts the debuggee to run backward. Clients should only call this request if the capability supportsStepBack is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "reverseContinue" ] + }, + "arguments": { + "$ref": "#/definitions/ReverseContinueArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "ReverseContinueArguments": { + "type": "object", + "description": "Arguments for 'reverseContinue' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Exceute 'reverseContinue' for this thread." + } + }, + "required": [ "threadId" ] + }, + "ReverseContinueResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'reverseContinue' request. This is just an acknowledgement, so no body field is required." 
+ }] + }, + + "RestartFrameRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "RestartFrame request; value of command field is 'restartFrame'.\nThe request restarts execution of the specified stackframe.\nThe debug adapter first sends the RestartFrameResponse and then a StoppedEvent (event type 'restart') after the restart has completed.", + "properties": { + "command": { + "type": "string", + "enum": [ "restartFrame" ] + }, + "arguments": { + "$ref": "#/definitions/RestartFrameArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "RestartFrameArguments": { + "type": "object", + "description": "Arguments for 'restartFrame' request.", + "properties": { + "frameId": { + "type": "integer", + "description": "Restart this stackframe." + } + }, + "required": [ "frameId" ] + }, + "RestartFrameResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'restartFrame' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "GotoRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Goto request; value of command field is 'goto'.\nThe request sets the location where the debuggee will continue to run.\nThis makes it possible to skip the execution of code or to executed code again.\nThe code between the current location and the goto target is not executed but skipped.\nThe debug adapter first sends the GotoResponse and then a StoppedEvent (event type 'goto').", + "properties": { + "command": { + "type": "string", + "enum": [ "goto" ] + }, + "arguments": { + "$ref": "#/definitions/GotoArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "GotoArguments": { + "type": "object", + "description": "Arguments for 'goto' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Set the goto target for this thread." + }, + "targetId": { + "type": "integer", + "description": "The location where the debuggee will continue to run." + } + }, + "required": [ "threadId", "targetId" ] + }, + "GotoResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'goto' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "PauseRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Pause request; value of command field is 'pause'.\nThe request suspenses the debuggee.\nThe debug adapter first sends the PauseResponse and then a StoppedEvent (event type 'pause') after the thread has been paused successfully.", + "properties": { + "command": { + "type": "string", + "enum": [ "pause" ] + }, + "arguments": { + "$ref": "#/definitions/PauseArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "PauseArguments": { + "type": "object", + "description": "Arguments for 'pause' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Pause execution for this thread." + } + }, + "required": [ "threadId" ] + }, + "PauseResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'pause' request. This is just an acknowledgement, so no body field is required." + }] + }, + + "StackTraceRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "StackTrace request; value of command field is 'stackTrace'. 
The request returns a stacktrace from the current execution state.", + "properties": { + "command": { + "type": "string", + "enum": [ "stackTrace" ] + }, + "arguments": { + "$ref": "#/definitions/StackTraceArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "StackTraceArguments": { + "type": "object", + "description": "Arguments for 'stackTrace' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Retrieve the stacktrace for this thread." + }, + "startFrame": { + "type": "integer", + "description": "The index of the first frame to return; if omitted frames start at 0." + }, + "levels": { + "type": "integer", + "description": "The maximum number of frames to return. If levels is not specified or 0, all frames are returned." + }, + "format": { + "$ref": "#/definitions/StackFrameFormat", + "description": "Specifies details on how to format the stack frames." + } + }, + "required": [ "threadId" ] + }, + "StackTraceResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'stackTrace' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "stackFrames": { + "type": "array", + "items": { + "$ref": "#/definitions/StackFrame" + }, + "description": "The frames of the stackframe. If the array has length zero, there are no stackframes available.\nThis means that there is no location information available." + }, + "totalFrames": { + "type": "integer", + "description": "The total number of frames available." + } + }, + "required": [ "stackFrames" ] + } + }, + "required": [ "body" ] + }] + }, + + "ScopesRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Scopes request; value of command field is 'scopes'.\nThe request returns the variable scopes for a given stackframe ID.", + "properties": { + "command": { + "type": "string", + "enum": [ "scopes" ] + }, + "arguments": { + "$ref": "#/definitions/ScopesArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "ScopesArguments": { + "type": "object", + "description": "Arguments for 'scopes' request.", + "properties": { + "frameId": { + "type": "integer", + "description": "Retrieve the scopes for this stackframe." + } + }, + "required": [ "frameId" ] + }, + "ScopesResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'scopes' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "scopes": { + "type": "array", + "items": { + "$ref": "#/definitions/Scope" + }, + "description": "The scopes of the stackframe. If the array has length zero, there are no scopes available." 
+ } + }, + "required": [ "scopes" ] + } + }, + "required": [ "body" ] + }] + }, + + "VariablesRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Variables request; value of command field is 'variables'.\nRetrieves all child variables for the given variable reference.\nAn optional filter can be used to limit the fetched children to either named or indexed children.", + "properties": { + "command": { + "type": "string", + "enum": [ "variables" ] + }, + "arguments": { + "$ref": "#/definitions/VariablesArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "VariablesArguments": { + "type": "object", + "description": "Arguments for 'variables' request.", + "properties": { + "variablesReference": { + "type": "integer", + "description": "The Variable reference." + }, + "filter": { + "type": "string", + "enum": [ "indexed", "named" ], + "description": "Optional filter to limit the child variables to either named or indexed. If ommited, both types are fetched." + }, + "start": { + "type": "integer", + "description": "The index of the first variable to return; if omitted children start at 0." + }, + "count": { + "type": "integer", + "description": "The number of variables to return. If count is missing or 0, all variables are returned." + }, + "format": { + "$ref": "#/definitions/ValueFormat", + "description": "Specifies details on how to format the Variable values." + } + }, + "required": [ "variablesReference" ] + }, + "VariablesResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'variables' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "variables": { + "type": "array", + "items": { + "$ref": "#/definitions/Variable" + }, + "description": "All (or a range) of variables for the given variable reference." + } + }, + "required": [ "variables" ] + } + }, + "required": [ "body" ] + }] + }, + + "SetVariableRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "setVariable request; value of command field is 'setVariable'.\nSet the variable with the given name in the variable container to a new value.", + "properties": { + "command": { + "type": "string", + "enum": [ "setVariable" ] + }, + "arguments": { + "$ref": "#/definitions/SetVariableArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "SetVariableArguments": { + "type": "object", + "description": "Arguments for 'setVariable' request.", + "properties": { + "variablesReference": { + "type": "integer", + "description": "The reference of the variable container." + }, + "name": { + "type": "string", + "description": "The name of the variable." + }, + "value": { + "type": "string", + "description": "The value of the variable." + }, + "format": { + "$ref": "#/definitions/ValueFormat", + "description": "Specifies details on how to format the response value." + } + }, + "required": [ "variablesReference", "name", "value" ] + }, + "SetVariableResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'setVariable' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "value": { + "type": "string", + "description": "The new value of the variable." + }, + "type": { + "type": "string", + "description": "The type of the new value. Typically shown in the UI when hovering over the value." 
+ }, + "variablesReference": { + "type": "number", + "description": "If variablesReference is > 0, the new value is structured and its children can be retrieved by passing variablesReference to the VariablesRequest." + }, + "namedVariables": { + "type": "number", + "description": "The number of named child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." + }, + "indexedVariables": { + "type": "number", + "description": "The number of indexed child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." + } + }, + "required": [ "value" ] + } + }, + "required": [ "body" ] + }] + }, + + "SourceRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Source request; value of command field is 'source'.\nThe request retrieves the source code for a given source reference.", + "properties": { + "command": { + "type": "string", + "enum": [ "source" ] + }, + "arguments": { + "$ref": "#/definitions/SourceArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "SourceArguments": { + "type": "object", + "description": "Arguments for 'source' request.", + "properties": { + "source": { + "$ref": "#/definitions/Source", + "description": "Specifies the source content to load. Either source.path or source.sourceReference must be specified." + }, + "sourceReference": { + "type": "integer", + "description": "The reference to the source. This is the same as source.sourceReference. This is provided for backward compatibility since old backends do not understand the 'source' attribute." + } + }, + "required": [ "sourceReference" ] + }, + "SourceResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'source' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "content": { + "type": "string", + "description": "Content of the source reference." + }, + "mimeType": { + "type": "string", + "description": "Optional content type (mime type) of the source." + } + }, + "required": [ "content" ] + } + }, + "required": [ "body" ] + }] + }, + + "ThreadsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Thread request; value of command field is 'threads'.\nThe request retrieves a list of all threads.", + "properties": { + "command": { + "type": "string", + "enum": [ "threads" ] + } + }, + "required": [ "command" ] + }] + }, + "ThreadsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'threads' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "threads": { + "type": "array", + "items": { + "$ref": "#/definitions/Thread" + }, + "description": "All threads." 
+ } + }, + "required": [ "threads" ] + } + }, + "required": [ "body" ] + }] + }, + + "ModulesRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Modules can be retrieved from the debug adapter with the ModulesRequest which can either return all modules or a range of modules to support paging.", + "properties": { + "command": { + "type": "string", + "enum": [ "modules" ] + }, + "arguments": { + "$ref": "#/definitions/ModulesArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "ModulesArguments": { + "type": "object", + "description": "Arguments for 'modules' request.", + "properties": { + "startModule": { + "type": "integer", + "description": "The index of the first module to return; if omitted modules start at 0." + }, + "moduleCount": { + "type": "integer", + "description": "The number of modules to return. If moduleCount is not specified or 0, all modules are returned." + } + } + }, + "ModulesResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'modules' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "modules": { + "type": "array", + "items": { + "$ref": "#/definitions/Module" + }, + "description": "All modules or range of modules." + }, + "totalModules": { + "type": "integer", + "description": "The total number of modules available." + } + }, + "required": [ "modules" ] + } + }, + "required": [ "body" ] + }] + }, + + "LoadedSourcesRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Retrieves the set of all sources currently loaded by the debugged process.", + "properties": { + "command": { + "type": "string", + "enum": [ "loadedSources" ] + }, + "arguments": { + "$ref": "#/definitions/LoadedSourcesArguments" + } + }, + "required": [ "command" ] + }] + }, + "LoadedSourcesArguments": { + "type": "object", + "description": "Arguments for 'loadedSources' request.\nThe 'loadedSources' request has no standardized arguments." + }, + "LoadedSourcesResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'loadedSources' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "sources": { + "type": "array", + "items": { + "$ref": "#/definitions/Source" + }, + "description": "Set of loaded sources." + } + }, + "required": [ "sources" ] + } + }, + "required": [ "body" ] + }] + }, + + "EvaluateRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Evaluate request; value of command field is 'evaluate'.\nEvaluates the given expression in the context of the top most stack frame.\nThe expression has access to any variables and arguments that are in scope.", + "properties": { + "command": { + "type": "string", + "enum": [ "evaluate" ] + }, + "arguments": { + "$ref": "#/definitions/EvaluateArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "EvaluateArguments": { + "type": "object", + "description": "Arguments for 'evaluate' request.", + "properties": { + "expression": { + "type": "string", + "description": "The expression to evaluate." + }, + "frameId": { + "type": "integer", + "description": "Evaluate the expression in the scope of this stack frame. If not specified, the expression is evaluated in the global scope." 
+ }, + "context": { + "type": "string", + "_enum": [ "watch", "repl", "hover" ], + "enumDescriptions": [ + "evaluate is run in a watch.", + "evaluate is run from REPL console.", + "evaluate is run from a data hover." + ], + "description": "The context in which the evaluate request is run." + }, + "format": { + "$ref": "#/definitions/ValueFormat", + "description": "Specifies details on how to format the Evaluate result." + } + }, + "required": [ "expression" ] + }, + "EvaluateResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'evaluate' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "result": { + "type": "string", + "description": "The result of the evaluate request." + }, + "type": { + "type": "string", + "description": "The optional type of the evaluate result." + }, + "presentationHint": { + "$ref": "#/definitions/VariablePresentationHint", + "description": "Properties of a evaluate result that can be used to determine how to render the result in the UI." + }, + "variablesReference": { + "type": "number", + "description": "If variablesReference is > 0, the evaluate result is structured and its children can be retrieved by passing variablesReference to the VariablesRequest." + }, + "namedVariables": { + "type": "number", + "description": "The number of named child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." + }, + "indexedVariables": { + "type": "number", + "description": "The number of indexed child variables.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." + } + }, + "required": [ "result", "variablesReference" ] + } + }, + "required": [ "body" ] + }] + }, + + "StepInTargetsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "StepInTargets request; value of command field is 'stepInTargets'.\nThis request retrieves the possible stepIn targets for the specified stack frame.\nThese targets can be used in the 'stepIn' request.\nThe StepInTargets may only be called if the 'supportsStepInTargetsRequest' capability exists and is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "stepInTargets" ] + }, + "arguments": { + "$ref": "#/definitions/StepInTargetsArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "StepInTargetsArguments": { + "type": "object", + "description": "Arguments for 'stepInTargets' request.", + "properties": { + "frameId": { + "type": "integer", + "description": "The stack frame for which to retrieve the possible stepIn targets." + } + }, + "required": [ "frameId" ] + }, + "StepInTargetsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'stepInTargets' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "targets": { + "type": "array", + "items": { + "$ref": "#/definitions/StepInTarget" + }, + "description": "The possible stepIn targets of the specified source location." 
+ } + }, + "required": [ "targets" ] + } + }, + "required": [ "body" ] + }] + }, + + "GotoTargetsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "GotoTargets request; value of command field is 'gotoTargets'.\nThis request retrieves the possible goto targets for the specified source location.\nThese targets can be used in the 'goto' request.\nThe GotoTargets request may only be called if the 'supportsGotoTargetsRequest' capability exists and is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "gotoTargets" ] + }, + "arguments": { + "$ref": "#/definitions/GotoTargetsArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "GotoTargetsArguments": { + "type": "object", + "description": "Arguments for 'gotoTargets' request.", + "properties": { + "source": { + "$ref": "#/definitions/Source", + "description": "The source location for which the goto targets are determined." + }, + "line": { + "type": "integer", + "description": "The line location for which the goto targets are determined." + }, + "column": { + "type": "integer", + "description": "An optional column location for which the goto targets are determined." + } + }, + "required": [ "source", "line" ] + }, + "GotoTargetsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'gotoTargets' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "targets": { + "type": "array", + "items": { + "$ref": "#/definitions/GotoTarget" + }, + "description": "The possible goto targets of the specified location." + } + }, + "required": [ "targets" ] + } + }, + "required": [ "body" ] + }] + }, + + "CompletionsRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "CompletionsRequest request; value of command field is 'completions'.\nReturns a list of possible completions for a given caret position and text.\nThe CompletionsRequest may only be called if the 'supportsCompletionsRequest' capability exists and is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "completions" ] + }, + "arguments": { + "$ref": "#/definitions/CompletionsArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "CompletionsArguments": { + "type": "object", + "description": "Arguments for 'completions' request.", + "properties": { + "frameId": { + "type": "integer", + "description": "Returns completions in the scope of this stack frame. If not specified, the completions are returned for the global scope." + }, + "text": { + "type": "string", + "description": "One or more source lines. Typically this is the text a user has typed into the debug console before he asked for completion." + }, + "column": { + "type": "integer", + "description": "The character position for which to determine the completion proposals." + }, + "line": { + "type": "integer", + "description": "An optional line for which to determine the completion proposals. If missing the first line of the text is assumed." + } + }, + "required": [ "text", "column" ] + }, + "CompletionsResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'completions' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "targets": { + "type": "array", + "items": { + "$ref": "#/definitions/CompletionItem" + }, + "description": "The possible completions for ." 
+ } + }, + "required": [ "targets" ] + } + }, + "required": [ "body" ] + }] + }, + + "ExceptionInfoRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "ExceptionInfoRequest request; value of command field is 'exceptionInfo'.\nRetrieves the details of the exception that caused the StoppedEvent to be raised.", + "properties": { + "command": { + "type": "string", + "enum": [ "exceptionInfo" ] + }, + "arguments": { + "$ref": "#/definitions/ExceptionInfoArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "ExceptionInfoArguments": { + "type": "object", + "description": "Arguments for 'exceptionInfo' request.", + "properties": { + "threadId": { + "type": "integer", + "description": "Thread for which exception information should be retrieved." + } + }, + "required": [ "threadId" ] + }, + "ExceptionInfoResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'exceptionInfo' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "exceptionId": { + "type": "string", + "description": "ID of the exception that was thrown." + }, + "description": { + "type": "string", + "description": "Descriptive text for the exception provided by the debug adapter." + }, + "breakMode": { + "$ref": "#/definitions/ExceptionBreakMode", + "description": "Mode that caused the exception notification to be raised." + }, + "details": { + "$ref": "#/definitions/ExceptionDetails", + "description": "Detailed information about the exception." + } + }, + "required": [ "exceptionId", "breakMode" ] + } + }, + "required": [ "body" ] + }] + }, + + "Capabilities": { + "type": "object", + "description": "Information about the capabilities of a debug adapter.", + "properties": { + "supportsConfigurationDoneRequest": { + "type": "boolean", + "description": "The debug adapter supports the configurationDoneRequest." + }, + "supportsFunctionBreakpoints": { + "type": "boolean", + "description": "The debug adapter supports function breakpoints." + }, + "supportsConditionalBreakpoints": { + "type": "boolean", + "description": "The debug adapter supports conditional breakpoints." + }, + "supportsHitConditionalBreakpoints": { + "type": "boolean", + "description": "The debug adapter supports breakpoints that break execution after a specified number of hits." + }, + "supportsEvaluateForHovers": { + "type": "boolean", + "description": "The debug adapter supports a (side effect free) evaluate request for data hovers." + }, + "exceptionBreakpointFilters": { + "type": "array", + "items": { + "$ref": "#/definitions/ExceptionBreakpointsFilter" + }, + "description": "Available filters or options for the setExceptionBreakpoints request." + }, + "supportsStepBack": { + "type": "boolean", + "description": "The debug adapter supports stepping back via the stepBack and reverseContinue requests." + }, + "supportsSetVariable": { + "type": "boolean", + "description": "The debug adapter supports setting a variable to a value." + }, + "supportsRestartFrame": { + "type": "boolean", + "description": "The debug adapter supports restarting a frame." + }, + "supportsGotoTargetsRequest": { + "type": "boolean", + "description": "The debug adapter supports the gotoTargetsRequest." + }, + "supportsStepInTargetsRequest": { + "type": "boolean", + "description": "The debug adapter supports the stepInTargetsRequest." 
+ }, + "supportsCompletionsRequest": { + "type": "boolean", + "description": "The debug adapter supports the completionsRequest." + }, + "supportsModulesRequest": { + "type": "boolean", + "description": "The debug adapter supports the modules request." + }, + "additionalModuleColumns": { + "type": "array", + "items": { + "$ref": "#/definitions/ColumnDescriptor" + }, + "description": "The set of additional module information exposed by the debug adapter." + }, + "supportedChecksumAlgorithms": { + "type": "array", + "items": { + "$ref": "#/definitions/ChecksumAlgorithm" + }, + "description": "Checksum algorithms supported by the debug adapter." + }, + "supportsRestartRequest": { + "type": "boolean", + "description": "The debug adapter supports the RestartRequest. In this case a client should not implement 'restart' by terminating and relaunching the adapter but by calling the RestartRequest." + }, + "supportsExceptionOptions": { + "type": "boolean", + "description": "The debug adapter supports 'exceptionOptions' on the setExceptionBreakpoints request." + }, + "supportsValueFormattingOptions": { + "type": "boolean", + "description": "The debug adapter supports a 'format' attribute on the stackTraceRequest, variablesRequest, and evaluateRequest." + }, + "supportsExceptionInfoRequest": { + "type": "boolean", + "description": "The debug adapter supports the exceptionInfo request." + }, + "supportTerminateDebuggee": { + "type": "boolean", + "description": "The debug adapter supports the 'terminateDebuggee' attribute on the 'disconnect' request." + }, + "supportsDelayedStackTraceLoading": { + "type": "boolean", + "description": "The debug adapter supports the delayed loading of parts of the stack, which requires that both the 'startFrame' and 'levels' arguments and the 'totalFrames' result of the 'StackTrace' request are supported." + }, + "supportsLoadedSourcesRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'loadedSources' request." + } + } + }, + + "ExceptionBreakpointsFilter": { + "type": "object", + "description": "An ExceptionBreakpointsFilter is shown in the UI as an option for configuring how exceptions are dealt with.", + "properties": { + "filter": { + "type": "string", + "description": "The internal ID of the filter. This value is passed to the setExceptionBreakpoints request." + }, + "label": { + "type": "string", + "description": "The name of the filter. This will be shown in the UI." + }, + "default": { + "type": "boolean", + "description": "Initial value of the filter. If not specified a value 'false' is assumed." + } + }, + "required": [ "filter", "label" ] + }, + + "Message": { + "type": "object", + "description": "A structured message object. Used to return errors from requests.", + "properties": { + "id": { + "type": "integer", + "description": "Unique identifier for the message." + }, + "format": { + "type": "string", + "description": "A format string for the message. Embedded variables have the form '{name}'.\nIf variable name starts with an underscore character, the variable does not contain user data (PII) and can be safely used for telemetry purposes." + }, + "variables": { + "type": "object", + "description": "An object used as a dictionary for looking up the variables in the format string.", + "additionalProperties": { + "type": "string", + "description": "Values must be strings." + } + }, + "sendTelemetry": { + "type": "boolean", + "description": "If true send to telemetry." 
+ }, + "showUser": { + "type": "boolean", + "description": "If true show user." + }, + "url": { + "type": "string", + "description": "An optional url where additional information about this message can be found." + }, + "urlLabel": { + "type": "string", + "description": "An optional label that is presented to the user as the UI for opening the url." + } + }, + "required": [ "id", "format" ] + }, + + "Module": { + "type": "object", + "description": "A Module object represents a row in the modules view.\nTwo attributes are mandatory: an id identifies a module in the modules view and is used in a ModuleEvent for identifying a module for adding, updating or deleting.\nThe name is used to minimally render the module in the UI.\n\nAdditional attributes can be added to the module. They will show up in the module View if they have a corresponding ColumnDescriptor.\n\nTo avoid an unnecessary proliferation of additional attributes with similar semantics but different names\nwe recommend to re-use attributes from the 'recommended' list below first, and only introduce new attributes if nothing appropriate could be found.", + "properties": { + "id": { + "type": ["integer", "string"], + "description": "Unique identifier for the module." + }, + "name": { + "type": "string", + "description": "A name of the module." + }, + "path": { + "type": "string", + "description": "optional but recommended attributes.\nalways try to use these first before introducing additional attributes.\n\nLogical full path to the module. The exact definition is implementation defined, but usually this would be a full path to the on-disk file for the module." + }, + "isOptimized": { + "type": "boolean", + "description": "True if the module is optimized." + }, + "isUserCode": { + "type": "boolean", + "description": "True if the module is considered 'user code' by a debugger that supports 'Just My Code'." + }, + "version": { + "type": "string", + "description": "Version of Module." + }, + "symbolStatus": { + "type": "string", + "description": "User understandable description of if symbols were found for the module (ex: 'Symbols Loaded', 'Symbols not found', etc." + }, + "symbolFilePath": { + "type": "string", + "description": "Logical full path to the symbol file. The exact definition is implementation defined." + }, + "dateTimeStamp": { + "type": "string", + "description": "Module created or modified." + }, + "addressRange": { + "type": "string", + "description": "Address range covered by this module." + } + }, + "required": [ "id", "name" ] + }, + + "ColumnDescriptor": { + "type": "object", + "description": "A ColumnDescriptor specifies what module attribute to show in a column of the ModulesView, how to format it, and what the column's label should be.\nIt is only used if the underlying UI actually supports this level of customization.", + "properties": { + "attributeName": { + "type": "string", + "description": "Name of the attribute rendered in this column." + }, + "label": { + "type": "string", + "description": "Header UI label of column." + }, + "format": { + "type": "string", + "description": "Format to use for the rendered values in this column. TBD how the format strings looks like." + }, + "type": { + "type": "string", + "enum": [ "string", "number", "boolean", "unixTimestampUTC" ], + "description": "Datatype of values in this column. Defaults to 'string' if not specified." + }, + "width": { + "type": "integer", + "description": "Width of this column in characters (hint only)." 
+ } + }, + "required": [ "attributeName", "label"] + }, + + "ModulesViewDescriptor": { + "type": "object", + "description": "The ModulesViewDescriptor is the container for all declarative configuration options of a ModuleView.\nFor now it only specifies the columns to be shown in the modules view.", + "properties": { + "columns": { + "type": "array", + "items": { + "$ref": "#/definitions/ColumnDescriptor" + } + } + }, + "required": [ "columns" ] + }, + + "Thread": { + "type": "object", + "description": "A Thread", + "properties": { + "id": { + "type": "integer", + "description": "Unique identifier for the thread." + }, + "name": { + "type": "string", + "description": "A name of the thread." + } + }, + "required": [ "id", "name" ] + }, + + "Source": { + "type": "object", + "description": "A Source is a descriptor for source code. It is returned from the debug adapter as part of a StackFrame and it is used by clients when specifying breakpoints.", + "properties": { + "name": { + "type": "string", + "description": "The short name of the source. Every source returned from the debug adapter has a name. When sending a source to the debug adapter this name is optional." + }, + "path": { + "type": "string", + "description": "The path of the source to be shown in the UI. It is only used to locate and load the content of the source if no sourceReference is specified (or its vaule is 0)." + }, + "sourceReference": { + "type": "number", + "description": "If sourceReference > 0 the contents of the source must be retrieved through the SourceRequest (even if a path is specified). A sourceReference is only valid for a session, so it must not be used to persist a source." + }, + "presentationHint": { + "type": "string", + "description": "An optional hint for how to present the source in the UI. A value of 'deemphasize' can be used to indicate that the source is not available or that it is skipped on stepping.", + "enum": [ "normal", "emphasize", "deemphasize" ] + }, + "origin": { + "type": "string", + "description": "The (optional) origin of this source: possible values 'internal module', 'inlined content from source map', etc." + }, + "sources": { + "type": "array", + "items": { + "$ref": "#/definitions/Source" + }, + "description": "An optional list of sources that are related to this source. These may be the source that generated this source." + }, + "adapterData": { + "type": [ "array", "boolean", "integer", "null", "number", "object", "string" ], + "description": "Optional data that a debug adapter might want to loop through the client. The client should leave the data intact and persist it across sessions. The client should not interpret the data." + }, + "checksums": { + "type": "array", + "items": { + "$ref": "#/definitions/Checksum" + }, + "description": "The checksums associated with this file." + } + } + }, + + "StackFrame": { + "type": "object", + "description": "A Stackframe contains the source location.", + "properties": { + "id": { + "type": "integer", + "description": "An identifier for the stack frame. It must be unique across all threads. This id can be used to retrieve the scopes of the frame with the 'scopesRequest' or to restart the execution of a stackframe." + }, + "name": { + "type": "string", + "description": "The name of the stack frame, typically a method name." + }, + "source": { + "$ref": "#/definitions/Source", + "description": "The optional source of the frame." + }, + "line": { + "type": "integer", + "description": "The line within the file of the frame. 
If source is null or doesn't exist, line is 0 and must be ignored." + }, + "column": { + "type": "integer", + "description": "The column within the line. If source is null or doesn't exist, column is 0 and must be ignored." + }, + "endLine": { + "type": "integer", + "description": "An optional end line of the range covered by the stack frame." + }, + "endColumn": { + "type": "integer", + "description": "An optional end column of the range covered by the stack frame." + }, + "moduleId": { + "type": ["integer", "string"], + "description": "The module associated with this frame, if any." + }, + "presentationHint": { + "type": "string", + "enum": [ "normal", "label", "subtle" ], + "description": "An optional hint for how to present this frame in the UI. A value of 'label' can be used to indicate that the frame is an artificial frame that is used as a visual label or separator. A value of 'subtle' can be used to change the appearance of a frame in a 'subtle' way." + } + }, + "required": [ "id", "name", "line", "column" ] + }, + + "Scope": { + "type": "object", + "description": "A Scope is a named container for variables. Optionally a scope can map to a source or a range within a source.", + "properties": { + "name": { + "type": "string", + "description": "Name of the scope such as 'Arguments', 'Locals'." + }, + "variablesReference": { + "type": "integer", + "description": "The variables of this scope can be retrieved by passing the value of variablesReference to the VariablesRequest." + }, + "namedVariables": { + "type": "integer", + "description": "The number of named variables in this scope.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." + }, + "indexedVariables": { + "type": "integer", + "description": "The number of indexed variables in this scope.\nThe client can use this optional information to present the variables in a paged UI and fetch them in chunks." + }, + "expensive": { + "type": "boolean", + "description": "If true, the number of variables in this scope is large or expensive to retrieve." + }, + "source": { + "$ref": "#/definitions/Source", + "description": "Optional source for this scope." + }, + "line": { + "type": "integer", + "description": "Optional start line of the range covered by this scope." + }, + "column": { + "type": "integer", + "description": "Optional start column of the range covered by this scope." + }, + "endLine": { + "type": "integer", + "description": "Optional end line of the range covered by this scope." + }, + "endColumn": { + "type": "integer", + "description": "Optional end column of the range covered by this scope." + } + }, + "required": [ "name", "variablesReference", "expensive" ] + }, + + "Variable": { + "type": "object", + "description": "A Variable is a name/value pair.\nOptionally a variable can have a 'type' that is shown if space permits or when hovering over the variable's name.\nAn optional 'kind' is used to render additional properties of the variable, e.g. 
different icons can be used to indicate that a variable is public or private.\nIf the value is structured (has children), a handle is provided to retrieve the children with the VariablesRequest.\nIf the number of named or indexed children is large, the numbers should be returned via the optional 'namedVariables' and 'indexedVariables' attributes.\nThe client can use this optional information to present the children in a paged UI and fetch them in chunks.", + "properties": { + "name": { + "type": "string", + "description": "The variable's name." + }, + "value": { + "type": "string", + "description": "The variable's value. This can be a multi-line text, e.g. for a function the body of a function." + }, + "type": { + "type": "string", + "description": "The type of the variable's value. Typically shown in the UI when hovering over the value." + }, + "presentationHint": { + "$ref": "#/definitions/VariablePresentationHint", + "description": "Properties of a variable that can be used to determine how to render the variable in the UI." + }, + "evaluateName": { + "type": "string", + "description": "Optional evaluatable name of this variable which can be passed to the 'EvaluateRequest' to fetch the variable's value." + }, + "variablesReference": { + "type": "integer", + "description": "If variablesReference is > 0, the variable is structured and its children can be retrieved by passing variablesReference to the VariablesRequest." + }, + "namedVariables": { + "type": "integer", + "description": "The number of named child variables.\nThe client can use this optional information to present the children in a paged UI and fetch them in chunks." + }, + "indexedVariables": { + "type": "integer", + "description": "The number of indexed child variables.\nThe client can use this optional information to present the children in a paged UI and fetch them in chunks." + } + }, + "required": [ "name", "value", "variablesReference" ] + }, + + "VariablePresentationHint": { + "type": "object", + "description": "Optional properties of a variable that can be used to determine how to render the variable in the UI.", + "properties": { + "kind": { + "description": "The kind of variable. Before introducing additional values, try to use the listed values.", + "type": "string", + "_enum": [ "property", "method", "class", "data", "event", "baseClass", "innerClass", "interface", "mostDerivedClass", "virtual" ], + "enumDescriptions": [ + "Indicates that the object is a property.", + "Indicates that the object is a method.", + "Indicates that the object is a class.", + "Indicates that the object is data.", + "Indicates that the object is an event.", + "Indicates that the object is a base class.", + "Indicates that the object is an inner class.", + "Indicates that the object is an interface.", + "Indicates that the object is the most derived class.", + "Indicates that the object is virtual, that means it is a synthetic object introduced by the adapter for rendering purposes, e.g. an index range for large arrays." + ] + }, + "attributes": { + "description": "Set of attributes represented as an array of strings. 
Before introducing additional values, try to use the listed values.", + "type": "array", + "items": { + "type": "string", + "_enum": [ "static", "constant", "readOnly", "rawString", "hasObjectId", "canHaveObjectId", "hasSideEffects" ], + "enumDescriptions": [ + "Indicates that the object is static.", + "Indicates that the object is a constant.", + "Indicates that the object is read only.", + "Indicates that the object is a raw string.", + "Indicates that the object can have an Object ID created for it.", + "Indicates that the object has an Object ID associated with it.", + "Indicates that the evaluation had side effects." + ] + } + }, + "visibility": { + "description": "Visibility of variable. Before introducing additional values, try to use the listed values.", + "type": "string", + "_enum": [ "public", "private", "protected", "internal", "final" ] + } + } + }, + + "SourceBreakpoint": { + "type": "object", + "description": "Properties of a breakpoint passed to the setBreakpoints request.", + "properties": { + "line": { + "type": "integer", + "description": "The source line of the breakpoint." + }, + "column": { + "type": "integer", + "description": "An optional source column of the breakpoint." + }, + "condition": { + "type": "string", + "description": "An optional expression for conditional breakpoints." + }, + "hitCondition": { + "type": "string", + "description": "An optional expression that controls how many hits of the breakpoint are ignored. The backend is expected to interpret the expression as needed." + } + }, + "required": [ "line" ] + }, + + "FunctionBreakpoint": { + "type": "object", + "description": "Properties of a breakpoint passed to the setFunctionBreakpoints request.", + "properties": { + "name": { + "type": "string", + "description": "The name of the function." + }, + "condition": { + "type": "string", + "description": "An optional expression for conditional breakpoints." + }, + "hitCondition": { + "type": "string", + "description": "An optional expression that controls how many hits of the breakpoint are ignored. The backend is expected to interpret the expression as needed." + } + }, + "required": [ "name" ] + }, + + "Breakpoint": { + "type": "object", + "description": "Information about a Breakpoint created in setBreakpoints or setFunctionBreakpoints.", + "properties": { + "id": { + "type": "integer", + "description": "An optional unique identifier for the breakpoint." + }, + "verified": { + "type": "boolean", + "description": "If true breakpoint could be set (but not necessarily at the desired location)." + }, + "message": { + "type": "string", + "description": "An optional message about the state of the breakpoint. This is shown to the user and can be used to explain why a breakpoint could not be verified." + }, + "source": { + "$ref": "#/definitions/Source", + "description": "The source where the breakpoint is located." + }, + "line": { + "type": "integer", + "description": "The start line of the actual range covered by the breakpoint." + }, + "column": { + "type": "integer", + "description": "An optional start column of the actual range covered by the breakpoint." + }, + "endLine": { + "type": "integer", + "description": "An optional end line of the actual range covered by the breakpoint." + }, + "endColumn": { + "type": "integer", + "description": "An optional end column of the actual range covered by the breakpoint. If no end line is given, then the end column is assumed to be in the start line." 
+ } + }, + "required": [ "verified" ] + }, + + "StepInTarget": { + "type": "object", + "description": "A StepInTarget can be used in the 'stepIn' request and determines into which single target the stepIn request should step.", + "properties": { + "id": { + "type": "integer", + "description": "Unique identifier for a stepIn target." + }, + "label": { + "type": "string", + "description": "The name of the stepIn target (shown in the UI)." + } + }, + "required": [ "id", "label" ] + }, + + "GotoTarget": { + "type": "object", + "description": "A GotoTarget describes a code location that can be used as a target in the 'goto' request.\nThe possible goto targets can be determined via the 'gotoTargets' request.", + "properties": { + "id": { + "type": "integer", + "description": "Unique identifier for a goto target. This is used in the goto request." + }, + "label": { + "type": "string", + "description": "The name of the goto target (shown in the UI)." + }, + "line": { + "type": "integer", + "description": "The line of the goto target." + }, + "column": { + "type": "integer", + "description": "An optional column of the goto target." + }, + "endLine": { + "type": "integer", + "description": "An optional end line of the range covered by the goto target." + }, + "endColumn": { + "type": "integer", + "description": "An optional end column of the range covered by the goto target." + } + }, + "required": [ "id", "label", "line" ] + }, + + "CompletionItem": { + "type": "object", + "description": "CompletionItems are the suggestions returned from the CompletionsRequest.", + "properties": { + "label": { + "type": "string", + "description": "The label of this completion item. By default this is also the text that is inserted when selecting this completion." + }, + "text": { + "type": "string", + "description": "If text is not falsy then it is inserted instead of the label." + }, + "type": { + "$ref": "#/definitions/CompletionItemType", + "description": "The item's type. Typically the client uses this information to render the item in the UI with an icon." + }, + "start": { + "type": "integer", + "description": "This value determines the location (in the CompletionsRequest's 'text' attribute) where the completion text is added.\nIf missing the text is added at the location specified by the CompletionsRequest's 'column' attribute." + }, + "length": { + "type": "integer", + "description": "This value determines how many characters are overwritten by the completion text.\nIf missing the value 0 is assumed which results in the completion text being inserted." + } + }, + "required": [ "label" ] + }, + + "CompletionItemType": { + "type": "string", + "description": "Some predefined types for the CompletionItem. Please note that not all clients have specific icons for all of them.", + "enum": [ "method", "function", "constructor", "field", "variable", "class", "interface", "module", "property", "unit", "value", "enum", "keyword", "snippet", "text", "color", "file", "reference", "customcolor" ] + }, + + "ChecksumAlgorithm": { + "type": "string", + "description": "Names of checksum algorithms that may be supported by a debug adapter.", + "enum": [ "MD5", "SHA1", "SHA256", "timestamp" ] + }, + + "Checksum": { + "type": "object", + "description": "The checksum of an item calculated by the specified algorithm.", + "properties": { + "algorithm": { + "$ref": "#/definitions/ChecksumAlgorithm", + "description": "The algorithm used to calculate this checksum." 
+ }, + "checksum": { + "type": "string", + "description": "Value of the checksum." + } + }, + "required": [ "algorithm", "checksum" ] + }, + + "ValueFormat": { + "type": "object", + "description": "Provides formatting information for a value.", + "properties": { + "hex": { + "type": "boolean", + "description": "Display the value in hex." + } + } + }, + + "StackFrameFormat": { + "allOf": [ { "$ref": "#/definitions/ValueFormat" }, { + "type": "object", + "description": "Provides formatting information for a stack frame.", + "properties": { + "parameters": { + "type": "boolean", + "description": "Displays parameters for the stack frame." + }, + "parameterTypes": { + "type": "boolean", + "description": "Displays the types of parameters for the stack frame." + }, + "parameterNames": { + "type": "boolean", + "description": "Displays the names of parameters for the stack frame." + }, + "parameterValues": { + "type": "boolean", + "description": "Displays the values of parameters for the stack frame." + }, + "line": { + "type": "boolean", + "description": "Displays the line number of the stack frame." + }, + "module": { + "type": "boolean", + "description": "Displays the module of the stack frame." + }, + "includeAll": { + "type": "boolean", + "description": "Includes all stack frames, including those the debug adapter might otherwise hide." + } + } + }] + }, + + "ExceptionOptions": { + "type": "object", + "description": "An ExceptionOptions assigns configuration options to a set of exceptions.", + "properties": { + "path": { + "type": "array", + "items": { + "$ref": "#/definitions/ExceptionPathSegment" + }, + "description": "A path that selects a single or multiple exceptions in a tree. If 'path' is missing, the whole tree is selected. By convention the first segment of the path is a category that is used to group exceptions in the UI." + }, + "breakMode": { + "$ref": "#/definitions/ExceptionBreakMode", + "description": "Condition when a thrown exception should result in a break." + } + }, + "required": [ "breakMode" ] + }, + + "ExceptionBreakMode": { + "type": "string", + "description": "This enumeration defines all possible conditions when a thrown exception should result in a break.\nnever: never breaks,\nalways: always breaks,\nunhandled: breaks when excpetion unhandled,\nuserUnhandled: breaks if the exception is not handled by user code.", + "enum": [ "never", "always", "unhandled", "userUnhandled" ] + }, + + "ExceptionPathSegment": { + "type": "object", + "description": "An ExceptionPathSegment represents a segment in a path that is used to match leafs or nodes in a tree of exceptions. If a segment consists of more than one name, it matches the names provided if 'negate' is false or missing or it matches anything except the names provided if 'negate' is true.", + "properties": { + "negate": { + "type": "boolean", + "description": "If false or missing this segment matches the names provided, otherwise it matches anything except the names provided." + }, + "names": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Depending on the value of 'negate' the names that should match or not match." + } + }, + "required": [ "names" ] + }, + + "ExceptionDetails": { + "type": "object", + "description": "Detailed information about an exception that has occurred.", + "properties": { + "message": { + "type": "string", + "description": "Message contained in the exception." + }, + "typeName": { + "type": "string", + "description": "Short type name of the exception object." 
+ }, + "fullTypeName": { + "type": "string", + "description": "Fully-qualified type name of the exception object." + }, + "evaluateName": { + "type": "string", + "description": "Optional expression that can be evaluated in the current scope to obtain the exception object." + }, + "stackTrace": { + "type": "string", + "description": "Stack trace at the time the exception was thrown." + }, + "innerException": { + "type": "array", + "items": { + "$ref": "#/definitions/ExceptionDetails" + }, + "description": "Details of the exception contained by this exception, if any." + } + } + } + + } +} diff --git a/debugger_protocol/schema/upstream.py b/debugger_protocol/schema/upstream.py index ced76dbe..8e9b6bde 100644 --- a/debugger_protocol/schema/upstream.py +++ b/debugger_protocol/schema/upstream.py @@ -3,6 +3,19 @@ from datetime import datetime from textwrap import dedent from . import UPSTREAM +from ._util import open_url, get_revision, get_checksum + + +def download(source, infile, outfile, *, _now=datetime.utcnow, _open=open_url): + """Return the corresponding metadata after downloading the schema file.""" + date = _now() + revision = get_revision(source, _open=_open) + + data = infile.read() + checksum = get_checksum(data) + outfile.write(data) + + return Metadata(source, revision, checksum, date) class Metadata(namedtuple('Metadata', 'upstream revision checksum date')): @@ -17,6 +30,11 @@ class Metadata(namedtuple('Metadata', 'upstream revision checksum date')): date: {:%s} """) % TIMESTAMP + #@get_revision(upstream) + #@download(upstream, revision=None) + #validate_file(filename) + #verify_remote() + @classmethod def parse(cls, data): """Return an instance based on the given metadata string.""" diff --git a/tests/debugger_protocol/schema/test_upstream.py b/tests/debugger_protocol/schema/test_upstream.py index 0c6ac15d..0cf870f2 100644 --- a/tests/debugger_protocol/schema/test_upstream.py +++ b/tests/debugger_protocol/schema/test_upstream.py @@ -1,9 +1,10 @@ from datetime import datetime +import io from textwrap import dedent import unittest from debugger_protocol.schema import UPSTREAM -from debugger_protocol.schema.upstream import Metadata +from debugger_protocol.schema.upstream import download, Metadata class Stringlike: @@ -19,6 +20,31 @@ class Hash(Stringlike): pass +class DownloadTests(unittest.TestCase): + + def test_success(self): + now = datetime.utcnow() + infile = io.BytesIO(b'') + outfile = io.BytesIO() + buf = io.BytesIO( + b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}') + meta = download('https://github.com/x/y/raw/master/z', + infile, + outfile, + _now=(lambda: now), + _open=(lambda _: buf), + ) + rcvd = outfile.getvalue() + + self.assertEqual(meta, Metadata( + 'https://github.com/x/y/raw/master/z', + 'fc2395ca3564fb2afded8d90ddbe38dad1bf86f1', + 'e778c3751f9d0bceaf8d5aa81e2c659f', + now, + )) + self.assertEqual(rcvd, b'') + + class MetadataTests(unittest.TestCase): def test_parse_minimal(self): From 8985aeead0c7e687139d2f18e630c860f5755595 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Wed, 10 Jan 2018 19:33:59 +0000 Subject: [PATCH 07/32] Add a command for checking the vendored schema file. 
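
The new "check" command validates the vendored debugProtocol.json against the
recorded UPSTREAM metadata and then compares it with the upstream copy on
GitHub.  A minimal sketch of the local half of that check, using only helpers
added in this series (read_schema(), read_metadata(), get_checksum()); it is
illustrative only, not the code in vendored.py:

    from debugger_protocol.schema._util import get_checksum
    from debugger_protocol.schema.file import read_schema
    from debugger_protocol.schema.metadata import read_metadata

    def check_local_sketch(schemafile):
        # Read the vendored debugProtocol.json alongside its UPSTREAM metadata.
        data = read_schema(schemafile)
        meta, _ = read_metadata(schemafile)
        # The vendored copy is okay if its checksum matches the recorded one;
        # the upstream half of the check then fetches meta.url and compares.
        if get_checksum(data) != meta.checksum:
            raise RuntimeError('vendored schema file does not match metadata')
        return meta

From the command line (also wired up as the "check-schemafile" Makefile
target added below):

    $ python3 -m debugger_protocol.schema check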
--- Makefile | 4 + debugger_protocol/schema/__init__.py | 4 - debugger_protocol/schema/__main__.py | 36 ++- debugger_protocol/schema/_util.py | 14 +- debugger_protocol/schema/file.py | 15 ++ debugger_protocol/schema/metadata.py | 117 ++++++++++ debugger_protocol/schema/upstream.py | 98 ++------ debugger_protocol/schema/vendored.py | 67 ++++++ tests/debugger_protocol/schema/helpers.py | 15 ++ .../debugger_protocol/schema/test___main__.py | 92 ++++++++ tests/debugger_protocol/schema/test_file.py | 22 ++ .../debugger_protocol/schema/test_metadata.py | 210 ++++++++++++++++++ .../debugger_protocol/schema/test_upstream.py | 164 +++----------- .../debugger_protocol/schema/test_vendored.py | 137 ++++++++++++ 14 files changed, 760 insertions(+), 235 deletions(-) create mode 100644 debugger_protocol/schema/file.py create mode 100644 debugger_protocol/schema/metadata.py create mode 100644 debugger_protocol/schema/vendored.py create mode 100644 tests/debugger_protocol/schema/helpers.py create mode 100644 tests/debugger_protocol/schema/test___main__.py create mode 100644 tests/debugger_protocol/schema/test_file.py create mode 100644 tests/debugger_protocol/schema/test_metadata.py create mode 100644 tests/debugger_protocol/schema/test_vendored.py diff --git a/Makefile b/Makefile index 89a9908f..0693541f 100644 --- a/Makefile +++ b/Makefile @@ -20,3 +20,7 @@ test: ## Run the test suite. .PHONY: coverage coverage: ## Check line coverage. $(PYTHON) -m coverage run -m tests + +.PHONY: check-schemafile +check-schemafile: ## Validate the vendored schema file. + python3 -m debugger_protocol.schema check diff --git a/debugger_protocol/schema/__init__.py b/debugger_protocol/schema/__init__.py index f8f9e890..7967ce5f 100644 --- a/debugger_protocol/schema/__init__.py +++ b/debugger_protocol/schema/__init__.py @@ -2,7 +2,3 @@ import os.path DATA_DIR = os.path.dirname(__file__) - -UPSTREAM = 'https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json' # noqa -VENDORED = os.path.join(DATA_DIR, 'debugProtocol.json') -METADATA = os.path.join(DATA_DIR, 'UPSTREAM') diff --git a/debugger_protocol/schema/__main__.py b/debugger_protocol/schema/__main__.py index 5c5847ee..773502de 100644 --- a/debugger_protocol/schema/__main__.py +++ b/debugger_protocol/schema/__main__.py @@ -1,10 +1,10 @@ import argparse -import os.path import sys -from . import (UPSTREAM, VENDORED, METADATA, - upstream) from ._util import open_url +from .metadata import open_metadata +from .upstream import URL as UPSTREAM, download +from .vendored import FILENAME as VENDORED, check_local, check_upstream COMMANDS = {} @@ -18,20 +18,33 @@ def as_command(name): @as_command('download') -def handle_download(source=UPSTREAM, target=VENDORED): +def handle_download(source=UPSTREAM, target=VENDORED, *, + _open=open, _open_url=open_url): # Download the schema file. - with open_url(source) as infile: - with open(target, 'wb') as outfile: - meta = upstream.download(source, infile, outfile) + with _open_url(source) as infile: + with _open(target, 'wb') as outfile: + meta = download(source, infile, outfile, + _open=_open) # Save the metadata. 
- filename = os.path.join(os.path.dirname(target), - os.path.basename(METADATA)) - with open(filename, 'w') as metafile: + metafile, _ = open_metadata(target, 'w', + _open=_open) + with metafile: metafile.write( meta.format()) +@as_command('check') +def handle_check(schemafile=VENDORED, *, _open=open, _open_url=open_url): + print('checking local schema file...') + check_local(schemafile, + _open=_open) + print('comparing with upstream schema file...') + check_upstream(schemafile, + _open=_open, _open_url=_open_url) + print('schema file okay') + + ############################# # the script @@ -58,6 +71,9 @@ def parse_args(argv=sys.argv[1:], prog=None): download.add_argument('--source', default=UPSTREAM) download.add_argument('--target', default=VENDORED) + check = subs.add_parser('check') + check.add_argument('--schemafile', default=VENDORED) + args = parser.parse_args(argv) if args.command is None: parser.print_help() diff --git a/debugger_protocol/schema/_util.py b/debugger_protocol/schema/_util.py index 1c7b4bb4..a0bb544f 100644 --- a/debugger_protocol/schema/_util.py +++ b/debugger_protocol/schema/_util.py @@ -12,7 +12,7 @@ def open_url(url): def get_revision(url, *, _open=open_url): """Return the revision corresponding to the given URL.""" if url.startswith('https://github.com/'): - return get_github_revision(url, _open=_open) + return github_get_revision(url, _open=_open) else: raise NotImplementedError @@ -35,7 +35,7 @@ GH_RESOURCE_RE = re.compile(r'^https://github.com' r'/(?P.*)$') -def get_github_revision(url, *, _open=open_url): +def github_get_revision(url, *, _open=open_url): """Return the full commit hash corresponding to the given URL.""" m = GH_RESOURCE_RE.match(url) if not m: @@ -48,3 +48,13 @@ def get_github_revision(url, *, _open=open_url): raw = revinfo.read() data = json.loads(raw.decode()) return data['sha'] + + +def github_url_replace_ref(url, newref): + """Return a new URL with the ref replaced.""" + m = GH_RESOURCE_RE.match(url) + if not m: + raise ValueError('invalid GitHub resource URL: {!r}'.format(url)) + org, repo, kind, _, path = m.groups() + parts = (org, repo, kind, newref, path) + return 'https://github.com/' + '/'.join(parts) diff --git a/debugger_protocol/schema/file.py b/debugger_protocol/schema/file.py new file mode 100644 index 00000000..701090a5 --- /dev/null +++ b/debugger_protocol/schema/file.py @@ -0,0 +1,15 @@ + + +class SchemaFileError(Exception): + """A schema-file-related operation failed.""" + + +def read_schema(filename, *, _open=open): + """Return the data (bytes) in the given schema file.""" + try: + schemafile = _open(filename, 'rb') + except FileNotFoundError as exc: + raise SchemaFileError( + 'schema file {!r} not found'.format(filename)) + with schemafile: + return schemafile.read() diff --git a/debugger_protocol/schema/metadata.py b/debugger_protocol/schema/metadata.py new file mode 100644 index 00000000..5ec49a38 --- /dev/null +++ b/debugger_protocol/schema/metadata.py @@ -0,0 +1,117 @@ +from collections import namedtuple +from datetime import datetime +import os.path +from textwrap import dedent + +from ._util import github_url_replace_ref + + +class MetadataError(Exception): + """A metadata-related operation failed.""" + + +def open_metadata(schemafile, mode='r', *, _open=open): + """Return a file object for the metadata of the given schema file. + + Also return the metadata file's filename. 
+ """ + from .vendored import METADATA + filename = os.path.join(os.path.dirname(schemafile), + os.path.basename(METADATA)) + try: + return _open(filename), filename + except FileNotFoundError as exc: + raise MetadataError( + 'metadata file for {!r} not found'.format(schemafile)) + + +def read_metadata(schemafile, *, _open=open): + """Return the metadata corresponding to the schema file. + + Also return the path to the metadata file. + """ + metafile, filename = open_metadata(schemafile, _open=_open) + with metafile: + data = metafile.read() + + try: + meta = Metadata.parse(data) + except Exception as exc: + raise MetadataError( + 'metadata file {!r} not valid: {}'.format(filename, exc)) + + return meta, filename + + +class Metadata(namedtuple('Metadata', 'upstream revision checksum date')): + """Info about the local copy of the upstream schema file.""" + + TIMESTAMP = '%Y-%m-%d %H:%M:%S (UTC)' + + FORMAT = dedent("""\ + upstream: {} + revision: {} + checksum: {} + date: {:%s} + """) % TIMESTAMP + + @classmethod + def parse(cls, data): + """Return an instance based on the given metadata string.""" + lines = data.splitlines() + + kwargs = {} + for line in lines: + line = line.strip() + if line.startswith('#'): + continue + if not line: + continue + field, _, value = line.partition(':') + kwargs[field] = value.strip() + self = cls(**kwargs) + return self + + def __new__(cls, upstream, revision, checksum, date): + # coercion + upstream = str(upstream) if upstream else None + revision = str(revision) if revision else None + checksum = str(checksum) if checksum else None + if not date: + date = None + elif isinstance(date, str): + date = datetime.strptime(date, cls.TIMESTAMP) + elif date.tzinfo is not None: + date -= date.utcoffset() + + self = super().__new__(cls, upstream, revision, checksum, date) + return self + + def __init__(self, *args, **kwargs): + # validation + + if not self.upstream: + raise ValueError('missing upstream URL') + # TODO ensure upstream is URL? + + if not self.revision: + raise ValueError('missing upstream revision') + # TODO ensure revision is a hash? + + if not self.checksum: + raise ValueError('missing checksum') + # TODO ensure checksum is a MD5 hash? + + if not self.date: + raise ValueError('missing date') + + @property + def url(self): + if self.upstream.startswith('https://github.com/'): + return github_url_replace_ref(self.upstream, self.revision) + else: + raise NotImplementedError + + def format(self): + """Return a string containing the formatted metadata.""" + return self.FORMAT.format(*self) diff --git a/debugger_protocol/schema/upstream.py b/debugger_protocol/schema/upstream.py index 8e9b6bde..62597665 100644 --- a/debugger_protocol/schema/upstream.py +++ b/debugger_protocol/schema/upstream.py @@ -1,9 +1,13 @@ -from collections import namedtuple from datetime import datetime -from textwrap import dedent +import io +import urllib.error -from . 
import UPSTREAM from ._util import open_url, get_revision, get_checksum +from .file import SchemaFileError +from .metadata import Metadata + + +URL = 'https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json' # noqa def download(source, infile, outfile, *, _now=datetime.utcnow, _open=open_url): @@ -18,80 +22,14 @@ def download(source, infile, outfile, *, _now=datetime.utcnow, _open=open_url): return Metadata(source, revision, checksum, date) -class Metadata(namedtuple('Metadata', 'upstream revision checksum date')): - """Info about the local copy of the upstream schema file.""" - - TIMESTAMP = '%Y-%m-%d %H:%M:%S (UTC)' - - FORMAT = dedent("""\ - upstream: {} - revision: {} - checksum: {} - date: {:%s} - """) % TIMESTAMP - - #@get_revision(upstream) - #@download(upstream, revision=None) - #validate_file(filename) - #verify_remote() - - @classmethod - def parse(cls, data): - """Return an instance based on the given metadata string.""" - lines = data.splitlines() - - kwargs = {} - for line in lines: - line = line.strip() - if line.startswith('#'): - continue - if not line: - continue - field, _, value = line.partition(':') - kwargs[field] = value.strip() - self = cls(**kwargs) - return self - - def __new__(cls, upstream, revision, checksum, date): - # coercion - upstream = str(upstream) if upstream else None - revision = str(revision) if revision else None - checksum = str(checksum) if checksum else None - if not date: - date = None - elif isinstance(date, str): - date = datetime.strptime(date, cls.TIMESTAMP) - elif date.tzinfo is not None: - date -= date.utcoffset() - - self = super().__new__(cls, upstream, revision, checksum, date) - return self - - def __init__(self, *args, **kwargs): - # validation - - if not self.upstream: - raise ValueError('missing upstream URL') - # TODO ensure upstream is URL? - - if not self.revision: - raise ValueError('missing upstream revision') - # TODO ensure revision is a hash? - - if not self.checksum: - raise ValueError('missing checksum') - # TODO ensure checksum is a MD5 hash? - - if not self.date: - raise ValueError('missing date') - - @property - def url(self): - if self.upstream == UPSTREAM: - return self.upstream.replace('master', self.revision) - else: - raise NotImplementedError - - def format(self): - """Return a string containing the formatted metadata.""" - return self.FORMAT.format(*self) +def read(url, *, _open_url=open_url): + """Return (data, metadata) for the given upstream URL.""" + outfile = io.BytesIO() + try: + infile = _open_url(url) + except (FileNotFoundError, urllib.error.HTTPError) as exc: + # XXX Ensure it's a 404 error? + raise SchemaFileError('schema file at {!r} not found'.format(url)) + with infile: + upstream = download(url, infile, outfile, _open=_open_url) + return outfile.getvalue(), upstream diff --git a/debugger_protocol/schema/vendored.py b/debugger_protocol/schema/vendored.py new file mode 100644 index 00000000..26b54690 --- /dev/null +++ b/debugger_protocol/schema/vendored.py @@ -0,0 +1,67 @@ +import os.path + +from . 
import DATA_DIR, upstream +from ._util import open_url, get_checksum +from .file import SchemaFileError, read_schema +from .metadata import MetadataError, read_metadata + + +FILENAME = os.path.join(DATA_DIR, 'debugProtocol.json') +METADATA = os.path.join(DATA_DIR, 'UPSTREAM') + + +class SchemaFileMismatchError(SchemaFileError, MetadataError): + """The schema file does not match expectations.""" + + @classmethod + def _build_message(cls, filename, actual, expected, upstream): + if upstream: + msg = ('local schema file {!r} does not match upstream {!r}' + ).format(filename, expected.upstream) + else: + msg = ('schema file {!r} does not match metadata file' + ).format(filename) + + for field in actual._fields: + value = getattr(actual, field) + other = getattr(expected, field) + if value != other: + msg += (' ({} mismatch: {!r} != {!r})' + ).format(field, value, other) + break + + return msg + + def __init__(self, filename, actual, expected, *, upstream=False): + super().__init__( + self._build_message(filename, actual, expected, upstream)) + self.filename = filename + self.actual = actual + self.expected = expected + self.upstream = upstream + + +def check_local(filename, *, _open=open): + """Ensure that the local schema file matches the local metadata file.""" + # Get the vendored metadata and data. + meta, _ = read_metadata(filename, _open=_open) + data = read_schema(filename, _open=_open) + + # Only worry about the checksum matching. + actual = meta._replace( + checksum=get_checksum(data)) + if actual != meta: + raise SchemaFileMismatchError(filename, actual, meta) + + +def check_upstream(filename, *, _open=open, _open_url=open_url): + """Ensure that the local metadata file matches the upstream schema file.""" + # Get the vendored and upstream metadata. + meta, _ = read_metadata(filename, _open=_open) + _, upmeta = upstream.read(meta.upstream, _open_url=_open_url) + + # Make sure the revision and checksum match. 
+ if meta.revision != upmeta.revision: + raise SchemaFileMismatchError(filename, meta, upmeta, upstream=True) + if meta.checksum != upmeta.checksum: + raise SchemaFileMismatchError(filename, meta, upmeta, upstream=True) diff --git a/tests/debugger_protocol/schema/helpers.py b/tests/debugger_protocol/schema/helpers.py new file mode 100644 index 00000000..0144c948 --- /dev/null +++ b/tests/debugger_protocol/schema/helpers.py @@ -0,0 +1,15 @@ + + +class StubOpener: + + def __init__(self, *files): + self.files = list(files) + self.calls = [] + + def open(self, *args): + self.calls.append(args) + + file = self.files.pop(0) + if file is None: + raise FileNotFoundError + return file diff --git a/tests/debugger_protocol/schema/test___main__.py b/tests/debugger_protocol/schema/test___main__.py new file mode 100644 index 00000000..dc1073fb --- /dev/null +++ b/tests/debugger_protocol/schema/test___main__.py @@ -0,0 +1,92 @@ +import contextlib +import io +from textwrap import dedent +import unittest + +from .helpers import StubOpener +from debugger_protocol.schema.__main__ import ( + COMMANDS, handle_download, handle_check) + + +class Outfile: + + def __init__(self, initial): + self.written = initial + + def write(self, data): + self.written += data + return len(data) + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + +class CommandRegistryTests(unittest.TestCase): + + def test_commands(self): + self.assertEqual(set(COMMANDS), { + 'download', + 'check', + }) + + +class HandleDownloadTests(unittest.TestCase): + + def test_default_args(self): + schemafile = io.BytesIO(b'') + outfile = Outfile(b'') + buf = io.BytesIO( + b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}') + metafile = Outfile('') + opener = StubOpener(schemafile, outfile, buf, metafile) + + stdout = io.StringIO() + with contextlib.redirect_stdout(stdout): + with contextlib.redirect_stderr(stdout): + handle_download( + _open=opener.open, _open_url=opener.open) + metadata = '\n'.join(line + for line in metafile.written.splitlines() + if not line.startswith('date: ')) + + self.assertEqual(outfile.written, b'') + self.assertEqual(metadata, dedent(""" + upstream: https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json + revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + """).strip()) # noqa + self.assertEqual(stdout.getvalue(), '') + + +class HandleCheckTests(unittest.TestCase): + + def test_default_args(self): + metadata = dedent(""" + upstream: https://github.com/x/y/raw/master/z + revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + date: 2018-01-09 13:10:59 (UTC) + """) + opener = StubOpener( + io.StringIO(metadata), + io.BytesIO(b''), # local + io.StringIO(metadata), + io.BytesIO(b''), # upstream + io.BytesIO( + b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}'), + ) + + stdout = io.StringIO() + with contextlib.redirect_stdout(stdout): + with contextlib.redirect_stderr(stdout): + handle_check( + _open=opener.open, _open_url=opener.open) + + self.assertEqual(stdout.getvalue(), dedent("""\ + checking local schema file... + comparing with upstream schema file... 
+ schema file okay + """)) diff --git a/tests/debugger_protocol/schema/test_file.py b/tests/debugger_protocol/schema/test_file.py new file mode 100644 index 00000000..bbfc00a6 --- /dev/null +++ b/tests/debugger_protocol/schema/test_file.py @@ -0,0 +1,22 @@ +import io +import unittest + +from .helpers import StubOpener +from debugger_protocol.schema.file import SchemaFileError, read_schema + + +class ReadSchemaTests(unittest.TestCase): + + def test_success(self): + schemafile = io.BytesIO(b'') + opener = StubOpener(schemafile) + + data = read_schema('schema.json', _open=opener.open) + + self.assertEqual(data, b'') + + def test_file_missing(self): + opener = StubOpener(None) + + with self.assertRaises(SchemaFileError): + read_schema('schema.json', _open=opener.open) diff --git a/tests/debugger_protocol/schema/test_metadata.py b/tests/debugger_protocol/schema/test_metadata.py new file mode 100644 index 00000000..f2ba18b8 --- /dev/null +++ b/tests/debugger_protocol/schema/test_metadata.py @@ -0,0 +1,210 @@ +from datetime import datetime +import io +import os.path +from textwrap import dedent +import unittest + +from .helpers import StubOpener +from debugger_protocol.schema.upstream import URL as UPSTREAM +from debugger_protocol.schema.metadata import ( + open_metadata, read_metadata, + MetadataError, Metadata) + + +class Stringlike: + + def __init__(self, value): + self.value = value + + def __str__(self): + return self.value + + +class Hash(Stringlike): + pass + + +class OpenMetadataTests(unittest.TestCase): + + def test_success(self): + expected = object() + opener = StubOpener(expected) + schemadir = os.path.join('x', 'y', 'z', '') + metafile, filename = open_metadata(schemadir + 'schema.json', + _open=opener.open) + + self.assertIs(metafile, expected) + self.assertEqual(filename, schemadir + 'UPSTREAM') + + def test_file_missing(self): + metafile = None + opener = StubOpener(metafile) + + with self.assertRaises(MetadataError): + open_metadata('schema.json', _open=opener.open) + + +class ReadMetadataTests(unittest.TestCase): + + def test_success(self): + metafile = io.StringIO(dedent(""" + upstream: https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: deadbeefdeadbeefdeadbeefdeadbeef + date: 2018-01-09 13:10:59 (UTC) + """)) + opener = StubOpener(metafile) + schemadir = os.path.join('x', 'y', 'z', '') + meta, filename = read_metadata(schemadir + 'schema.json', + _open=opener.open) + + self.assertEqual(meta, + Metadata('https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + )) + self.assertEqual(filename, schemadir + 'UPSTREAM') + + def test_file_missing(self): + metafile = None + opener = StubOpener(metafile) + + with self.assertRaises(MetadataError): + read_metadata('schema.json', _open=opener.open) + + def test_file_invalid(self): + metafile = io.StringIO('') + opener = StubOpener(metafile) + + with self.assertRaises(MetadataError): + read_metadata('schema.json', _open=opener.open) + + +class MetadataTests(unittest.TestCase): + + def test_parse_minimal(self): + expected = Metadata('https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + ) + meta = Metadata.parse(dedent(""" + upstream: https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: deadbeefdeadbeefdeadbeefdeadbeef + date: 2018-01-09 13:10:59 (UTC) + """)) + + self.assertEqual(meta, expected) + + def test_parse_with_whitespace_and_comments(self): + expected = 
Metadata('https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + ) + meta = Metadata.parse(dedent(""" + + # generated by x.y.z + upstream: https://x.y.z/schema.json + + revision: abcdef0123456789 + checksum: deadbeefdeadbeefdeadbeefdeadbeef + date: 2018-01-09 13:10:59 (UTC) + + # done! + + """)) # noqa + + self.assertEqual(meta, expected) + + def test_parse_roundtrip_from_object(self): + orig = Metadata('https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + ) + meta = Metadata.parse( + orig.format()) + + self.assertEqual(meta, orig) + + def test_parse_roundtrip_from_string(self): + orig = dedent("""\ + upstream: https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: deadbeefdeadbeefdeadbeefdeadbeef + date: 2018-01-09 13:10:59 (UTC) + """) + data = (Metadata.parse(orig) + ).format() + + self.assertEqual(data, orig) + + def test_coercion_noop(self): + meta = Metadata('https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + ) + + self.assertEqual(meta, ( + 'https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + )) + + def test_coercion_change_all(self): + meta = Metadata(Stringlike('https://x.y.z/schema.json'), + Hash('abcdef0123456789'), + Hash('deadbeefdeadbeefdeadbeefdeadbeef'), + '2018-01-09 13:10:59 (UTC)', + ) + + self.assertEqual(meta, ( + 'https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + )) + + def test_validation_fail(self): + baseargs = [ + 'https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + ] + for i in range(len(baseargs)): + with self.subTest(baseargs[i]): + args = list(baseargs) + args[i] = '' + with self.assertRaises(ValueError): + Metadata(*args) + + def test_url(self): + meta = Metadata(UPSTREAM, + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + ) + url = meta.url + + self.assertEqual(url, 'https://github.com/Microsoft/vscode-debugadapter-node/raw/abcdef0123456789/debugProtocol.json') # noqa + + def test_format(self): + meta = Metadata('https://x.y.z/schema.json', + 'abcdef0123456789', + 'deadbeefdeadbeefdeadbeefdeadbeef', + datetime(2018, 1, 9, 13, 10, 59), + ) + formatted = meta.format() + + self.assertEqual(formatted, dedent("""\ + upstream: https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: deadbeefdeadbeefdeadbeefdeadbeef + date: 2018-01-09 13:10:59 (UTC) + """)) diff --git a/tests/debugger_protocol/schema/test_upstream.py b/tests/debugger_protocol/schema/test_upstream.py index 0cf870f2..e969ae35 100644 --- a/tests/debugger_protocol/schema/test_upstream.py +++ b/tests/debugger_protocol/schema/test_upstream.py @@ -1,23 +1,12 @@ from datetime import datetime import io -from textwrap import dedent import unittest -from debugger_protocol.schema import UPSTREAM -from debugger_protocol.schema.upstream import download, Metadata - - -class Stringlike: - - def __init__(self, value): - self.value = value - - def __str__(self): - return self.value - - -class Hash(Stringlike): - pass +from .helpers import StubOpener +from debugger_protocol.schema.file import SchemaFileError +from debugger_protocol.schema.metadata import Metadata +from debugger_protocol.schema.upstream import ( + 
download, read) class DownloadTests(unittest.TestCase): @@ -45,130 +34,27 @@ class DownloadTests(unittest.TestCase): self.assertEqual(rcvd, b'') -class MetadataTests(unittest.TestCase): +class ReadSchemaTests(unittest.TestCase): - def test_parse_minimal(self): - expected = Metadata('https://x.y.z/schema.json', - 'abcdef0123456789', - 'deadbeefdeadbeefdeadbeefdeadbeef', - datetime(2018, 1, 9, 13, 10, 59), - ) - meta = Metadata.parse(dedent(""" - upstream: https://x.y.z/schema.json - revision: abcdef0123456789 - checksum: deadbeefdeadbeefdeadbeefdeadbeef - date: 2018-01-09 13:10:59 (UTC) - """)) + def test_success(self): + schemafile = io.BytesIO(b'') + buf = io.BytesIO( + b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}') + opener = StubOpener(schemafile, buf) + data, meta = read('https://github.com/x/y/raw/master/z', + _open_url=opener.open) - self.assertEqual(meta, expected) + self.assertEqual(data, b'') + self.assertEqual(meta, Metadata( + 'https://github.com/x/y/raw/master/z', + 'fc2395ca3564fb2afded8d90ddbe38dad1bf86f1', + 'e778c3751f9d0bceaf8d5aa81e2c659f', + meta.date, + )) - def test_parse_with_whitespace_and_comments(self): - expected = Metadata('https://x.y.z/schema.json', - 'abcdef0123456789', - 'deadbeefdeadbeefdeadbeefdeadbeef', - datetime(2018, 1, 9, 13, 10, 59), - ) - meta = Metadata.parse(dedent(""" + def test_resource_missing(self): + schemafile = None + opener = StubOpener(schemafile) - # generated by x.y.z - upstream: https://x.y.z/schema.json - - revision: abcdef0123456789 - checksum: deadbeefdeadbeefdeadbeefdeadbeef - date: 2018-01-09 13:10:59 (UTC) - - # done! - - """)) # noqa - - self.assertEqual(meta, expected) - - def test_parse_roundtrip_from_object(self): - orig = Metadata('https://x.y.z/schema.json', - 'abcdef0123456789', - 'deadbeefdeadbeefdeadbeefdeadbeef', - datetime(2018, 1, 9, 13, 10, 59), - ) - meta = Metadata.parse( - orig.format()) - - self.assertEqual(meta, orig) - - def test_parse_roundtrip_from_string(self): - orig = dedent("""\ - upstream: https://x.y.z/schema.json - revision: abcdef0123456789 - checksum: deadbeefdeadbeefdeadbeefdeadbeef - date: 2018-01-09 13:10:59 (UTC) - """).format(UPSTREAM) - data = (Metadata.parse(orig) - ).format() - - self.assertEqual(data, orig) - - def test_coercion_noop(self): - meta = Metadata('https://x.y.z/schema.json', - 'abcdef0123456789', - 'deadbeefdeadbeefdeadbeefdeadbeef', - datetime(2018, 1, 9, 13, 10, 59), - ) - - self.assertEqual(meta, ( - 'https://x.y.z/schema.json', - 'abcdef0123456789', - 'deadbeefdeadbeefdeadbeefdeadbeef', - datetime(2018, 1, 9, 13, 10, 59), - )) - - def test_coercion_change_all(self): - meta = Metadata(Stringlike('https://x.y.z/schema.json'), - Hash('abcdef0123456789'), - Hash('deadbeefdeadbeefdeadbeefdeadbeef'), - '2018-01-09 13:10:59 (UTC)', - ) - - self.assertEqual(meta, ( - 'https://x.y.z/schema.json', - 'abcdef0123456789', - 'deadbeefdeadbeefdeadbeefdeadbeef', - datetime(2018, 1, 9, 13, 10, 59), - )) - - def test_validation_fail(self): - baseargs = [ - 'https://x.y.z/schema.json', - 'abcdef0123456789', - 'deadbeefdeadbeefdeadbeefdeadbeef', - datetime(2018, 1, 9, 13, 10, 59), - ] - for i in range(len(baseargs)): - with self.subTest(baseargs[i]): - args = list(baseargs) - args[i] = '' - with self.assertRaises(ValueError): - Metadata(*args) - - def test_url(self): - meta = Metadata(UPSTREAM, - 'abcdef0123456789', - 'deadbeefdeadbeefdeadbeefdeadbeef', - datetime(2018, 1, 9, 13, 10, 59), - ) - url = meta.url - - self.assertEqual(url, 
'https://github.com/Microsoft/vscode-debugadapter-node/raw/abcdef0123456789/debugProtocol.json') # noqa - - def test_format(self): - meta = Metadata('https://x.y.z/schema.json', - 'abcdef0123456789', - 'deadbeefdeadbeefdeadbeefdeadbeef', - datetime(2018, 1, 9, 13, 10, 59), - ) - formatted = meta.format() - - self.assertEqual(formatted, dedent("""\ - upstream: https://x.y.z/schema.json - revision: abcdef0123456789 - checksum: deadbeefdeadbeefdeadbeefdeadbeef - date: 2018-01-09 13:10:59 (UTC) - """)) + with self.assertRaises(SchemaFileError): + read('schema.json', _open_url=opener.open) diff --git a/tests/debugger_protocol/schema/test_vendored.py b/tests/debugger_protocol/schema/test_vendored.py new file mode 100644 index 00000000..48a3ab87 --- /dev/null +++ b/tests/debugger_protocol/schema/test_vendored.py @@ -0,0 +1,137 @@ +import io +from textwrap import dedent +import unittest + +from .helpers import StubOpener +from debugger_protocol.schema.file import SchemaFileError +from debugger_protocol.schema.metadata import MetadataError +from debugger_protocol.schema.vendored import ( + SchemaFileMismatchError, check_local, check_upstream) + + +class CheckLocalTests(unittest.TestCase): + + def test_match(self): + metafile = io.StringIO(dedent(""" + upstream: https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + date: 2018-01-09 13:10:59 (UTC) + """)) + schemafile = io.BytesIO(b'') + opener = StubOpener(metafile, schemafile) + + # This does not fail. + check_local('schema.json', _open=opener.open) + + def test_mismatch(self): + metafile = io.StringIO(dedent(""" + upstream: https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: abc2 + date: 2018-01-09 13:10:59 (UTC) + """)) + schemafile = io.BytesIO(b'') + opener = StubOpener(metafile, schemafile) + + with self.assertRaises(SchemaFileMismatchError) as cm: + check_local('schema.json', _open=opener.open) + self.assertEqual(str(cm.exception), + ('schema file \'schema.json\' does not match ' + 'metadata file (checksum mismatch: ' + '\'e778c3751f9d0bceaf8d5aa81e2c659f\' != \'abc2\')')) + + def test_metafile_missing(self): + metafile = None + schemafile = io.BytesIO(b'') + opener = StubOpener(metafile, schemafile) + + with self.assertRaises(MetadataError): + check_local('schema.json', _open=opener.open) + + def test_metafile_invalid(self): + metafile = io.StringIO('') + metafile.name = '/x/y/z/UPSTREAM' + schemafile = io.BytesIO(b'') + opener = StubOpener(metafile, schemafile) + + with self.assertRaises(MetadataError): + check_local('schema.json', _open=opener.open) + + def test_schemafile_missing(self): + metafile = io.StringIO(dedent(""" + upstream: https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + date: 2018-01-09 13:10:59 (UTC) + """)) + schemafile = None + opener = StubOpener(metafile, schemafile) + + with self.assertRaises(SchemaFileError): + check_local('schema.json', _open=opener.open) + + +class CheckUpstream(unittest.TestCase): + + def test_match(self): + metafile = io.StringIO(dedent(""" + upstream: https://github.com/x/y/raw/master/z + revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + date: 2018-01-09 13:10:59 (UTC) + """)) + schemafile = io.BytesIO(b'') + buf = io.BytesIO( + b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}') + opener = StubOpener(metafile, schemafile, buf) + + # This does not fail. 
+ check_upstream('schema.json', + _open=opener.open, _open_url=opener.open) + + def test_revision_mismatch(self): + metafile = io.StringIO(dedent(""" + upstream: https://github.com/x/y/raw/master/z + revision: abc2 + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + date: 2018-01-09 13:10:59 (UTC) + """)) + schemafile = io.BytesIO(b'') + buf = io.BytesIO( + b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}') + opener = StubOpener(metafile, schemafile, buf) + + with self.assertRaises(SchemaFileMismatchError) as cm: + check_upstream('schema.json', + _open=opener.open, _open_url=opener.open) + self.assertEqual(str(cm.exception), + ('local schema file \'schema.json\' does not match ' + 'upstream \'https://github.com/x/y/raw/master/z\' ' + '(revision mismatch: \'abc2\' != \'fc2395ca3564fb2afded8d90ddbe38dad1bf86f1\')')) # noqa + + def test_checksum_mismatch(self): + metafile = io.StringIO(dedent(""" + upstream: https://github.com/x/y/raw/master/z + revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 + checksum: abc2 + date: 2018-01-09 13:10:59 (UTC) + """)) + schemafile = io.BytesIO(b'') + buf = io.BytesIO( + b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}') + opener = StubOpener(metafile, schemafile, buf) + + with self.assertRaises(SchemaFileMismatchError) as cm: + check_upstream('schema.json', + _open=opener.open, _open_url=opener.open) + self.assertEqual(str(cm.exception), + ('local schema file \'schema.json\' does not match ' + 'upstream \'https://github.com/x/y/raw/master/z\' ' + '(checksum mismatch: \'abc2\' != \'e778c3751f9d0bceaf8d5aa81e2c659f\')')) # noqa + + def test_metafile_missing(self): + ... + + def test_url_resource_missing(self): + ... From 8e42001f3a68ca43c776d892b40a2db5453c6892 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Wed, 10 Jan 2018 19:44:03 +0000 Subject: [PATCH 08/32] date -> downloaded --- debugger_protocol/schema/UPSTREAM | 8 ++-- debugger_protocol/schema/metadata.py | 31 ++++++------ debugger_protocol/schema/upstream.py | 4 +- .../debugger_protocol/schema/test___main__.py | 16 +++---- .../debugger_protocol/schema/test_metadata.py | 34 ++++++------- .../debugger_protocol/schema/test_upstream.py | 2 +- .../debugger_protocol/schema/test_vendored.py | 48 +++++++++---------- 7 files changed, 72 insertions(+), 71 deletions(-) diff --git a/debugger_protocol/schema/UPSTREAM b/debugger_protocol/schema/UPSTREAM index f6e399d4..3bbccb3d 100644 --- a/debugger_protocol/schema/UPSTREAM +++ b/debugger_protocol/schema/UPSTREAM @@ -1,4 +1,4 @@ -upstream: https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json -revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 -checksum: 24a370d038f7875f4db2631d5238fd17 -date: 2018-01-10 00:47:10 (UTC) +upstream: https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json +revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 +checksum: 24a370d038f7875f4db2631d5238fd17 +downloaded: 2018-01-10 00:47:10 (UTC) diff --git a/debugger_protocol/schema/metadata.py b/debugger_protocol/schema/metadata.py index 5ec49a38..c6cd1b84 100644 --- a/debugger_protocol/schema/metadata.py +++ b/debugger_protocol/schema/metadata.py @@ -43,16 +43,17 @@ def read_metadata(schemafile, *, _open=open): return meta, filename -class Metadata(namedtuple('Metadata', 'upstream revision checksum date')): +class Metadata( + namedtuple('Metadata', 'upstream revision checksum downloaded')): """Info about the local copy of the upstream schema file.""" TIMESTAMP = '%Y-%m-%d %H:%M:%S (UTC)' FORMAT = dedent("""\ - upstream: {} - 
revision: {} - checksum: {} - date: {:%s} + upstream: {} + revision: {} + checksum: {} + downloaded: {:%s} """) % TIMESTAMP @classmethod @@ -72,19 +73,19 @@ class Metadata(namedtuple('Metadata', 'upstream revision checksum date')): self = cls(**kwargs) return self - def __new__(cls, upstream, revision, checksum, date): + def __new__(cls, upstream, revision, checksum, downloaded): # coercion upstream = str(upstream) if upstream else None revision = str(revision) if revision else None checksum = str(checksum) if checksum else None - if not date: - date = None - elif isinstance(date, str): - date = datetime.strptime(date, cls.TIMESTAMP) - elif date.tzinfo is not None: - date -= date.utcoffset() + if not downloaded: + downloaded = None + elif isinstance(downloaded, str): + downloaded = datetime.strptime(downloaded, cls.TIMESTAMP) + elif downloaded.tzinfo is not None: + downloaded -= downloaded.utcoffset() - self = super().__new__(cls, upstream, revision, checksum, date) + self = super().__new__(cls, upstream, revision, checksum, downloaded) return self def __init__(self, *args, **kwargs): @@ -102,8 +103,8 @@ class Metadata(namedtuple('Metadata', 'upstream revision checksum date')): raise ValueError('missing checksum') # TODO ensure checksum is a MD5 hash? - if not self.date: - raise ValueError('missing date') + if not self.downloaded: + raise ValueError('missing downloaded') @property def url(self): diff --git a/debugger_protocol/schema/upstream.py b/debugger_protocol/schema/upstream.py index 62597665..48a6fa2d 100644 --- a/debugger_protocol/schema/upstream.py +++ b/debugger_protocol/schema/upstream.py @@ -12,14 +12,14 @@ URL = 'https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugPro def download(source, infile, outfile, *, _now=datetime.utcnow, _open=open_url): """Return the corresponding metadata after downloading the schema file.""" - date = _now() + timestamp = _now() revision = get_revision(source, _open=_open) data = infile.read() checksum = get_checksum(data) outfile.write(data) - return Metadata(source, revision, checksum, date) + return Metadata(source, revision, checksum, timestamp) def read(url, *, _open_url=open_url): diff --git a/tests/debugger_protocol/schema/test___main__.py b/tests/debugger_protocol/schema/test___main__.py index dc1073fb..9238c0ff 100644 --- a/tests/debugger_protocol/schema/test___main__.py +++ b/tests/debugger_protocol/schema/test___main__.py @@ -50,13 +50,13 @@ class HandleDownloadTests(unittest.TestCase): _open=opener.open, _open_url=opener.open) metadata = '\n'.join(line for line in metafile.written.splitlines() - if not line.startswith('date: ')) + if not line.startswith('downloaded: ')) self.assertEqual(outfile.written, b'') self.assertEqual(metadata, dedent(""" - upstream: https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json - revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 - checksum: e778c3751f9d0bceaf8d5aa81e2c659f + upstream: https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json + revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 + checksum: e778c3751f9d0bceaf8d5aa81e2c659f """).strip()) # noqa self.assertEqual(stdout.getvalue(), '') @@ -65,10 +65,10 @@ class HandleCheckTests(unittest.TestCase): def test_default_args(self): metadata = dedent(""" - upstream: https://github.com/x/y/raw/master/z - revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 - checksum: e778c3751f9d0bceaf8d5aa81e2c659f - date: 2018-01-09 13:10:59 (UTC) + upstream: https://github.com/x/y/raw/master/z 
+ revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + downloaded: 2018-01-09 13:10:59 (UTC) """) opener = StubOpener( io.StringIO(metadata), diff --git a/tests/debugger_protocol/schema/test_metadata.py b/tests/debugger_protocol/schema/test_metadata.py index f2ba18b8..a374fdc5 100644 --- a/tests/debugger_protocol/schema/test_metadata.py +++ b/tests/debugger_protocol/schema/test_metadata.py @@ -48,10 +48,10 @@ class ReadMetadataTests(unittest.TestCase): def test_success(self): metafile = io.StringIO(dedent(""" - upstream: https://x.y.z/schema.json - revision: abcdef0123456789 - checksum: deadbeefdeadbeefdeadbeefdeadbeef - date: 2018-01-09 13:10:59 (UTC) + upstream: https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: deadbeefdeadbeefdeadbeefdeadbeef + downloaded: 2018-01-09 13:10:59 (UTC) """)) opener = StubOpener(metafile) schemadir = os.path.join('x', 'y', 'z', '') @@ -90,10 +90,10 @@ class MetadataTests(unittest.TestCase): datetime(2018, 1, 9, 13, 10, 59), ) meta = Metadata.parse(dedent(""" - upstream: https://x.y.z/schema.json - revision: abcdef0123456789 - checksum: deadbeefdeadbeefdeadbeefdeadbeef - date: 2018-01-09 13:10:59 (UTC) + upstream: https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: deadbeefdeadbeefdeadbeefdeadbeef + downloaded: 2018-01-09 13:10:59 (UTC) """)) self.assertEqual(meta, expected) @@ -111,7 +111,7 @@ class MetadataTests(unittest.TestCase): revision: abcdef0123456789 checksum: deadbeefdeadbeefdeadbeefdeadbeef - date: 2018-01-09 13:10:59 (UTC) + downloaded: 2018-01-09 13:10:59 (UTC) # done! @@ -132,10 +132,10 @@ class MetadataTests(unittest.TestCase): def test_parse_roundtrip_from_string(self): orig = dedent("""\ - upstream: https://x.y.z/schema.json - revision: abcdef0123456789 - checksum: deadbeefdeadbeefdeadbeefdeadbeef - date: 2018-01-09 13:10:59 (UTC) + upstream: https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: deadbeefdeadbeefdeadbeefdeadbeef + downloaded: 2018-01-09 13:10:59 (UTC) """) data = (Metadata.parse(orig) ).format() @@ -203,8 +203,8 @@ class MetadataTests(unittest.TestCase): formatted = meta.format() self.assertEqual(formatted, dedent("""\ - upstream: https://x.y.z/schema.json - revision: abcdef0123456789 - checksum: deadbeefdeadbeefdeadbeefdeadbeef - date: 2018-01-09 13:10:59 (UTC) + upstream: https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: deadbeefdeadbeefdeadbeefdeadbeef + downloaded: 2018-01-09 13:10:59 (UTC) """)) diff --git a/tests/debugger_protocol/schema/test_upstream.py b/tests/debugger_protocol/schema/test_upstream.py index e969ae35..39b100b5 100644 --- a/tests/debugger_protocol/schema/test_upstream.py +++ b/tests/debugger_protocol/schema/test_upstream.py @@ -49,7 +49,7 @@ class ReadSchemaTests(unittest.TestCase): 'https://github.com/x/y/raw/master/z', 'fc2395ca3564fb2afded8d90ddbe38dad1bf86f1', 'e778c3751f9d0bceaf8d5aa81e2c659f', - meta.date, + meta.downloaded, )) def test_resource_missing(self): diff --git a/tests/debugger_protocol/schema/test_vendored.py b/tests/debugger_protocol/schema/test_vendored.py index 48a3ab87..839dc902 100644 --- a/tests/debugger_protocol/schema/test_vendored.py +++ b/tests/debugger_protocol/schema/test_vendored.py @@ -13,10 +13,10 @@ class CheckLocalTests(unittest.TestCase): def test_match(self): metafile = io.StringIO(dedent(""" - upstream: https://x.y.z/schema.json - revision: abcdef0123456789 - checksum: e778c3751f9d0bceaf8d5aa81e2c659f - date: 2018-01-09 13:10:59 (UTC) + upstream: 
https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + downloaded: 2018-01-09 13:10:59 (UTC) """)) schemafile = io.BytesIO(b'') opener = StubOpener(metafile, schemafile) @@ -26,10 +26,10 @@ class CheckLocalTests(unittest.TestCase): def test_mismatch(self): metafile = io.StringIO(dedent(""" - upstream: https://x.y.z/schema.json - revision: abcdef0123456789 - checksum: abc2 - date: 2018-01-09 13:10:59 (UTC) + upstream: https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: abc2 + downloaded: 2018-01-09 13:10:59 (UTC) """)) schemafile = io.BytesIO(b'') opener = StubOpener(metafile, schemafile) @@ -60,10 +60,10 @@ class CheckLocalTests(unittest.TestCase): def test_schemafile_missing(self): metafile = io.StringIO(dedent(""" - upstream: https://x.y.z/schema.json - revision: abcdef0123456789 - checksum: e778c3751f9d0bceaf8d5aa81e2c659f - date: 2018-01-09 13:10:59 (UTC) + upstream: https://x.y.z/schema.json + revision: abcdef0123456789 + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + downloaded: 2018-01-09 13:10:59 (UTC) """)) schemafile = None opener = StubOpener(metafile, schemafile) @@ -76,10 +76,10 @@ class CheckUpstream(unittest.TestCase): def test_match(self): metafile = io.StringIO(dedent(""" - upstream: https://github.com/x/y/raw/master/z - revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 - checksum: e778c3751f9d0bceaf8d5aa81e2c659f - date: 2018-01-09 13:10:59 (UTC) + upstream: https://github.com/x/y/raw/master/z + revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + downloaded: 2018-01-09 13:10:59 (UTC) """)) schemafile = io.BytesIO(b'') buf = io.BytesIO( @@ -92,10 +92,10 @@ class CheckUpstream(unittest.TestCase): def test_revision_mismatch(self): metafile = io.StringIO(dedent(""" - upstream: https://github.com/x/y/raw/master/z - revision: abc2 - checksum: e778c3751f9d0bceaf8d5aa81e2c659f - date: 2018-01-09 13:10:59 (UTC) + upstream: https://github.com/x/y/raw/master/z + revision: abc2 + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + downloaded: 2018-01-09 13:10:59 (UTC) """)) schemafile = io.BytesIO(b'') buf = io.BytesIO( @@ -112,10 +112,10 @@ class CheckUpstream(unittest.TestCase): def test_checksum_mismatch(self): metafile = io.StringIO(dedent(""" - upstream: https://github.com/x/y/raw/master/z - revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 - checksum: abc2 - date: 2018-01-09 13:10:59 (UTC) + upstream: https://github.com/x/y/raw/master/z + revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 + checksum: abc2 + downloaded: 2018-01-09 13:10:59 (UTC) """)) schemafile = io.BytesIO(b'') buf = io.BytesIO( From 4483567f1af3a95537bf957e4cc966084ef860ba Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Wed, 10 Jan 2018 22:58:29 +0000 Subject: [PATCH 09/32] XXX -> TODO --- debugger_protocol/schema/upstream.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/debugger_protocol/schema/upstream.py b/debugger_protocol/schema/upstream.py index 48a6fa2d..b46a057d 100644 --- a/debugger_protocol/schema/upstream.py +++ b/debugger_protocol/schema/upstream.py @@ -28,7 +28,7 @@ def read(url, *, _open_url=open_url): try: infile = _open_url(url) except (FileNotFoundError, urllib.error.HTTPError) as exc: - # XXX Ensure it's a 404 error? + # TODO: Ensure it's a 404 error? 
raise SchemaFileError('schema file at {!r} not found'.format(url)) with infile: upstream = download(url, infile, outfile, _open=_open_url) From 1f9b0f84dd1952c5fdd5c50fd669d34e69f8dacd Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Thu, 11 Jan 2018 16:56:40 +0000 Subject: [PATCH 10/32] Fix the filename of the tests.__main__ tests. --- tests/{test_test_main.py => test_tests___main__.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/{test_test_main.py => test_tests___main__.py} (100%) diff --git a/tests/test_test_main.py b/tests/test_tests___main__.py similarity index 100% rename from tests/test_test_main.py rename to tests/test_tests___main__.py From 65fff09ee56abbb2bfc3db912725b951073765f2 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Thu, 11 Jan 2018 17:29:35 +0000 Subject: [PATCH 11/32] Add a test that actually checks the vendored schema against upstream. --- tests/system_tests/__init__.py | 0 tests/system_tests/test_schema.py | 19 +++++++++++++++++++ 2 files changed, 19 insertions(+) create mode 100644 tests/system_tests/__init__.py create mode 100644 tests/system_tests/test_schema.py diff --git a/tests/system_tests/__init__.py b/tests/system_tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/system_tests/test_schema.py b/tests/system_tests/test_schema.py new file mode 100644 index 00000000..f020e86e --- /dev/null +++ b/tests/system_tests/test_schema.py @@ -0,0 +1,19 @@ +import contextlib +import io +import unittest + +from debugger_protocol.schema.__main__ import handle_check + + +class VendoredSchemaTests(unittest.TestCase): + + def test_matches_upstream(self): + stdout = io.StringIO() + with contextlib.redirect_stdout(stdout): + with contextlib.redirect_stderr(stdout): + try: + handle_check() + except Exception as exc: + self.fail(str(exc)) + result = stdout.getvalue().strip().splitlines()[-1] + self.assertEqual(result, 'schema file okay') From 0d71d2cf75ae74b463569c208d2dee39765c91d5 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Thu, 11 Jan 2018 18:06:34 +0000 Subject: [PATCH 12/32] _open -> _open_url --- debugger_protocol/schema/__main__.py | 2 +- debugger_protocol/schema/_util.py | 8 ++++---- debugger_protocol/schema/upstream.py | 7 ++++--- tests/debugger_protocol/schema/test_upstream.py | 2 +- tests/debugger_protocol/schema/test_util.py | 2 +- 5 files changed, 11 insertions(+), 10 deletions(-) diff --git a/debugger_protocol/schema/__main__.py b/debugger_protocol/schema/__main__.py index 773502de..fb9817a6 100644 --- a/debugger_protocol/schema/__main__.py +++ b/debugger_protocol/schema/__main__.py @@ -24,7 +24,7 @@ def handle_download(source=UPSTREAM, target=VENDORED, *, with _open_url(source) as infile: with _open(target, 'wb') as outfile: meta = download(source, infile, outfile, - _open=_open) + _open_url=_open_url) # Save the metadata. 
metafile, _ = open_metadata(target, 'w', diff --git a/debugger_protocol/schema/_util.py b/debugger_protocol/schema/_util.py index a0bb544f..dbb70781 100644 --- a/debugger_protocol/schema/_util.py +++ b/debugger_protocol/schema/_util.py @@ -9,10 +9,10 @@ def open_url(url): return urllib.request.urlopen(url) -def get_revision(url, *, _open=open_url): +def get_revision(url, *, _open_url=open_url): """Return the revision corresponding to the given URL.""" if url.startswith('https://github.com/'): - return github_get_revision(url, _open=_open) + return github_get_revision(url, _open_url=_open_url) else: raise NotImplementedError @@ -35,7 +35,7 @@ GH_RESOURCE_RE = re.compile(r'^https://github.com' r'/(?P.*)$') -def github_get_revision(url, *, _open=open_url): +def github_get_revision(url, *, _open_url=open_url): """Return the full commit hash corresponding to the given URL.""" m = GH_RESOURCE_RE.match(url) if not m: @@ -44,7 +44,7 @@ def github_get_revision(url, *, _open=open_url): revurl = ('https://api.github.com/repos/{}/{}/commits/{}' ).format(org, repo, ref) - with _open(revurl) as revinfo: + with _open_url(revurl) as revinfo: raw = revinfo.read() data = json.loads(raw.decode()) return data['sha'] diff --git a/debugger_protocol/schema/upstream.py b/debugger_protocol/schema/upstream.py index b46a057d..db40f27e 100644 --- a/debugger_protocol/schema/upstream.py +++ b/debugger_protocol/schema/upstream.py @@ -10,10 +10,11 @@ from .metadata import Metadata URL = 'https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json' # noqa -def download(source, infile, outfile, *, _now=datetime.utcnow, _open=open_url): +def download(source, infile, outfile, *, + _now=datetime.utcnow, _open_url=open_url): """Return the corresponding metadata after downloading the schema file.""" timestamp = _now() - revision = get_revision(source, _open=_open) + revision = get_revision(source, _open_url=_open_url) data = infile.read() checksum = get_checksum(data) @@ -31,5 +32,5 @@ def read(url, *, _open_url=open_url): # TODO: Ensure it's a 404 error? raise SchemaFileError('schema file at {!r} not found'.format(url)) with infile: - upstream = download(url, infile, outfile, _open=_open_url) + upstream = download(url, infile, outfile, _open_url=_open_url) return outfile.getvalue(), upstream diff --git a/tests/debugger_protocol/schema/test_upstream.py b/tests/debugger_protocol/schema/test_upstream.py index 39b100b5..75c6503a 100644 --- a/tests/debugger_protocol/schema/test_upstream.py +++ b/tests/debugger_protocol/schema/test_upstream.py @@ -21,7 +21,7 @@ class DownloadTests(unittest.TestCase): infile, outfile, _now=(lambda: now), - _open=(lambda _: buf), + _open_url=(lambda _: buf), ) rcvd = outfile.getvalue() diff --git a/tests/debugger_protocol/schema/test_util.py b/tests/debugger_protocol/schema/test_util.py index d24a265e..e048105c 100644 --- a/tests/debugger_protocol/schema/test_util.py +++ b/tests/debugger_protocol/schema/test_util.py @@ -10,7 +10,7 @@ class GetRevisionTests(unittest.TestCase): buf = io.BytesIO( b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}') revision = get_revision('https://github.com/x/y/raw/master/z', - _open=lambda _: buf) + _open_url=lambda _: buf) self.assertEqual(revision, 'fc2395ca3564fb2afded8d90ddbe38dad1bf86f1') From cbfe55a56a4fb1d456833eae787246822ae09f4c Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Thu, 11 Jan 2018 18:24:46 +0000 Subject: [PATCH 13/32] Pass the file mode to _open(). 
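
Without this the mode argument was accepted but silently dropped, so callers asking
for write access (as handle_download() does) still got the opener's default
read-only mode. A minimal sketch of the behaviour this change establishes; the
recording stub below is illustrative and not part of the patch:

    from debugger_protocol.schema.metadata import open_metadata

    calls = []

    def recording_open(filename, mode='r'):
        # Record how open_metadata() invokes the opener, then bail out.
        calls.append((filename, mode))
        raise FileNotFoundError

    try:
        open_metadata('schema.json', 'w', _open=recording_open)
    except Exception:
        pass

    # The requested mode now reaches the opener:
    #   calls == [('UPSTREAM', 'w')]
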
--- debugger_protocol/schema/metadata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/debugger_protocol/schema/metadata.py b/debugger_protocol/schema/metadata.py index c6cd1b84..c27a674c 100644 --- a/debugger_protocol/schema/metadata.py +++ b/debugger_protocol/schema/metadata.py @@ -19,7 +19,7 @@ def open_metadata(schemafile, mode='r', *, _open=open): filename = os.path.join(os.path.dirname(schemafile), os.path.basename(METADATA)) try: - return _open(filename), filename + return _open(filename, mode), filename except FileNotFoundError as exc: raise MetadataError( 'metadata file for {!r} not found'.format(schemafile)) From a93d0ed25c8a6b36adaacd10c205877b62cda4d5 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Thu, 11 Jan 2018 18:36:14 +0000 Subject: [PATCH 14/32] Print progress messages during download. --- debugger_protocol/schema/__main__.py | 8 ++++++-- tests/debugger_protocol/schema/test___main__.py | 8 +++++++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/debugger_protocol/schema/__main__.py b/debugger_protocol/schema/__main__.py index fb9817a6..74908227 100644 --- a/debugger_protocol/schema/__main__.py +++ b/debugger_protocol/schema/__main__.py @@ -21,17 +21,21 @@ def as_command(name): def handle_download(source=UPSTREAM, target=VENDORED, *, _open=open, _open_url=open_url): # Download the schema file. + print('downloading the schema file from {}...'.format(source)) with _open_url(source) as infile: with _open(target, 'wb') as outfile: meta = download(source, infile, outfile, _open_url=_open_url) + print('...schema file written to {}.'.format(target)) # Save the metadata. - metafile, _ = open_metadata(target, 'w', - _open=_open) + print('saving the schema metadata...') + metafile, filename = open_metadata(target, 'w', + _open=_open) with metafile: metafile.write( meta.format()) + print('...metadata written to {}.'.format(filename)) @as_command('check') diff --git a/tests/debugger_protocol/schema/test___main__.py b/tests/debugger_protocol/schema/test___main__.py index 9238c0ff..a044403e 100644 --- a/tests/debugger_protocol/schema/test___main__.py +++ b/tests/debugger_protocol/schema/test___main__.py @@ -4,6 +4,7 @@ from textwrap import dedent import unittest from .helpers import StubOpener +from debugger_protocol.schema.vendored import FILENAME as VENDORED, METADATA from debugger_protocol.schema.__main__ import ( COMMANDS, handle_download, handle_check) @@ -58,7 +59,12 @@ class HandleDownloadTests(unittest.TestCase): revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 checksum: e778c3751f9d0bceaf8d5aa81e2c659f """).strip()) # noqa - self.assertEqual(stdout.getvalue(), '') + self.assertEqual(stdout.getvalue(), dedent("""\ + downloading the schema file from https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json... + ...schema file written to {}. + saving the schema metadata... + ...metadata written to {}. + """).format(VENDORED, METADATA)) # noqa class HandleCheckTests(unittest.TestCase): From 7981cc1f9e66183dee683e530fa64fd394403b2a Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Thu, 11 Jan 2018 18:50:51 +0000 Subject: [PATCH 15/32] Return a default value from get_revision() if URL unrecognized. 
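
get_revision() only knows how to map GitHub URLs to a commit hash, so any other
source now falls back to an empty revision instead of raising NotImplementedError.
A small sketch of the resulting contract, using a stubbed opener and made-up URLs:

    import io

    from debugger_protocol.schema._util import get_revision

    # GitHub raw URLs are still resolved through the commits API.
    rev = get_revision(
        'https://github.com/x/y/raw/master/z',
        _open_url=lambda url: io.BytesIO(b'{"sha": "abc123"}'))
    # rev == 'abc123'

    # Anything else now yields '' rather than an exception.
    rev = get_revision(
        'https://localhost/schema.json',
        _open_url=lambda url: io.BytesIO())
    # rev == ''
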
--- debugger_protocol/schema/_util.py | 2 +- tests/debugger_protocol/schema/test_util.py | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/debugger_protocol/schema/_util.py b/debugger_protocol/schema/_util.py index dbb70781..79d08a77 100644 --- a/debugger_protocol/schema/_util.py +++ b/debugger_protocol/schema/_util.py @@ -14,7 +14,7 @@ def get_revision(url, *, _open_url=open_url): if url.startswith('https://github.com/'): return github_get_revision(url, _open_url=_open_url) else: - raise NotImplementedError + return '' def get_checksum(data): diff --git a/tests/debugger_protocol/schema/test_util.py b/tests/debugger_protocol/schema/test_util.py index e048105c..f636a36c 100644 --- a/tests/debugger_protocol/schema/test_util.py +++ b/tests/debugger_protocol/schema/test_util.py @@ -14,6 +14,12 @@ class GetRevisionTests(unittest.TestCase): self.assertEqual(revision, 'fc2395ca3564fb2afded8d90ddbe38dad1bf86f1') + def test_unrecognized_url(self): + revision = get_revision('https://localhost/schema.json', + _open_url=lambda _: io.BytesIO()) + + self.assertEqual(revision, '') + class GetChecksumTests(unittest.TestCase): From 137f671ee450e4ad92fda16f227d668d3efbde94 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Thu, 11 Jan 2018 20:04:36 +0000 Subject: [PATCH 16/32] Add a testing helper for serving HTTP locally. --- tests/helpers/__init__.py | 0 tests/helpers/http.py | 61 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+) create mode 100644 tests/helpers/__init__.py create mode 100644 tests/helpers/http.py diff --git a/tests/helpers/__init__.py b/tests/helpers/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/helpers/http.py b/tests/helpers/http.py new file mode 100644 index 00000000..e7ded9f8 --- /dev/null +++ b/tests/helpers/http.py @@ -0,0 +1,61 @@ +import http.server +import threading + + +class Server: + """Wraps an http.server.HTTPServer in a thread.""" + + def __init__(self, handler, host='', port=8000): + self.handler = handler + self._addr = (host, port) + self._server = None + self._thread = None + + @property + def address(self): + host, port = self._addr + if host == '': + host = 'localhost' + return '{}:{}'.format(host, port) + + def start(self): + if self._server is not None: + raise RuntimeError('already started') + self._server = http.server.HTTPServer(self._addr, self.handler) + self._thread = threading.Thread( + target=lambda: self._server.serve_forever()) + self._thread.start() + + def stop(self): + if self._server is None: + raise RuntimeError('not running') + self._server.shutdown() + self._thread.join() + self._server.server_close() + self._thread = None + self._server = None + + def __enter__(self): + self.start() + return self + + def __exit__(self, *args): + self.stop() + + +def json_file_handler(data): + """Return an HTTP handler that always serves the given JSON bytes.""" + + class HTTPHandler(http.server.BaseHTTPRequestHandler): + def do_GET(self): + self.send_response(200) + self.send_header('Content-Type', b'application/json') + self.send_header('Content-Length', + str(len(data)).encode('ascii')) + self.end_headers() + self.wfile.write(data) + + def log_message(self, *args, **kwargs): + pass + + return HTTPHandler From 4032a5fb81b135f0e721ae13dac90ed419234931 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Thu, 11 Jan 2018 20:27:30 +0000 Subject: [PATCH 17/32] Always show the full unittest help. 
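
Before this change the wrapper always injected its discovery arguments, so
python -m tests -h only showed the help for unittest discovery. With -h/--help
recognized, discovery is skipped and the flag is passed straight through to
unittest. Roughly (the interpreter path shown is just an example):

    from tests.__main__ import convert_argv

    argv = convert_argv(['-h'])
    # e.g. ['/usr/bin/python3 -m unittest', '-h']
    # No 'discover --start-directory ...' arguments are injected, so
    # unittest prints its full help.
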
--- tests/__main__.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/tests/__main__.py b/tests/__main__.py index c61e50ca..94622a6a 100644 --- a/tests/__main__.py +++ b/tests/__main__.py @@ -9,6 +9,7 @@ PROJECT_ROOT = os.path.dirname(TEST_ROOT) def convert_argv(argv): + help = False args = [] modules = set() for arg in argv: @@ -26,15 +27,17 @@ def convert_argv(argv): mod = mod.replace(os.sep, '.') arg = mod if not test else mod + '.' + test modules.add(mod) + elif arg in ('-h', '--help'): + help = True args.append(arg) - if not modules: + cmd = [sys.executable + ' -m unittest'] # ...how unittest.main() likes it. + if not modules and not help: # Do discovery. - args = ['discover', + cmd += ['discover', '--start-directory', PROJECT_ROOT, - ] + args - return [sys.executable + ' -m unittest'] + args - #return [sys.executable, '-m', 'unittest'] + args + ] + return cmd + args if __name__ == '__main__': From 4b7f82a5a21eb01cec118cfe1fe6711194d36754 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Thu, 11 Jan 2018 20:53:48 +0000 Subject: [PATCH 18/32] Add a test option for running just the ptvsd unit tests. --- Makefile | 8 ++++++-- tests/__main__.py | 19 +++++++++++++++--- tests/ptvsd/__init__.py | 0 tests/test_tests___main__.py | 38 ++++++++++++++++++++++++------------ 4 files changed, 48 insertions(+), 17 deletions(-) create mode 100644 tests/ptvsd/__init__.py diff --git a/Makefile b/Makefile index 0693541f..30e0d25f 100644 --- a/Makefile +++ b/Makefile @@ -14,8 +14,12 @@ lint: $(PYTHON) -m flake8 --ignore E24,E121,E123,E125,E126,E221,E226,E266,E704,E265 $(CURDIR) .PHONY: test -test: ## Run the test suite. - $(PYTHON) -m tests +test: ## Run the test suite. + $(PYTHON) -m tests --full + +.PHONY: test-quick +test-quick: + $(PYTHON) -m tests --quick .PHONY: coverage coverage: ## Check line coverage. diff --git a/tests/__main__.py b/tests/__main__.py index 94622a6a..a4e57523 100644 --- a/tests/__main__.py +++ b/tests/__main__.py @@ -10,9 +10,16 @@ PROJECT_ROOT = os.path.dirname(TEST_ROOT) def convert_argv(argv): help = False + quick = False args = [] modules = set() for arg in argv: + if arg == '--quick': + quick = True + continue + if arg == '--full': + quick = False + continue # Unittest's main has only flags and positional args. # So we don't worry about options with values. if not arg.startswith('-'): @@ -34,9 +41,15 @@ def convert_argv(argv): cmd = [sys.executable + ' -m unittest'] # ...how unittest.main() likes it. if not modules and not help: # Do discovery. 
- cmd += ['discover', - '--start-directory', PROJECT_ROOT, - ] + if quick: + start = os.path.join(TEST_ROOT, 'ptvsd') + else: + start = PROJECT_ROOT + cmd += [ + 'discover', + '--top-level-directory', PROJECT_ROOT, + '--start-directory', start, + ] return cmd + args diff --git a/tests/ptvsd/__init__.py b/tests/ptvsd/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_tests___main__.py b/tests/test_tests___main__.py index dc25c4e4..0f1bc7d4 100644 --- a/tests/test_tests___main__.py +++ b/tests/test_tests___main__.py @@ -6,18 +6,41 @@ import sys from .__main__ import convert_argv -PROJECT_ROOT = os.path.dirname(os.path.dirname(__file__)) +TEST_ROOT = os.path.dirname(__file__) +PROJECT_ROOT = os.path.dirname(TEST_ROOT) class ConvertArgsTests(unittest.TestCase): - def test_discovery(self): - argv = convert_argv(['-v', '--failfast']) + def test_no_args(self): + argv = convert_argv([]) self.assertEqual(argv, [ sys.executable + ' -m unittest', 'discover', + '--top-level-directory', PROJECT_ROOT, '--start-directory', PROJECT_ROOT, + ]) + + def test_discovery_full(self): + argv = convert_argv(['-v', '--failfast', '--full']) + + self.assertEqual(argv, [ + sys.executable + ' -m unittest', + 'discover', + '--top-level-directory', PROJECT_ROOT, + '--start-directory', PROJECT_ROOT, + '-v', '--failfast', + ]) + + def test_discovery_quick(self): + argv = convert_argv(['-v', '--failfast', '--quick']) + + self.assertEqual(argv, [ + sys.executable + ' -m unittest', + 'discover', + '--top-level-directory', PROJECT_ROOT, + '--start-directory', os.path.join(TEST_ROOT, 'ptvsd'), '-v', '--failfast', ]) @@ -35,12 +58,3 @@ class ConvertArgsTests(unittest.TestCase): 'x.y.Spam.test_spam', 'z.Eggs', ]) - - def test_no_args(self): - argv = convert_argv([]) - - self.assertEqual(argv, [ - sys.executable + ' -m unittest', - 'discover', - '--start-directory', PROJECT_ROOT, - ]) From a2f8e4bcbaaefe874ca2f379bfb93975862bfe84 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Thu, 11 Jan 2018 20:54:54 +0000 Subject: [PATCH 19/32] Only run the ptvsd unit tests if using Python 2. --- tests/__main__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/__main__.py b/tests/__main__.py index a4e57523..99faffbe 100644 --- a/tests/__main__.py +++ b/tests/__main__.py @@ -43,6 +43,8 @@ def convert_argv(argv): # Do discovery. if quick: start = os.path.join(TEST_ROOT, 'ptvsd') + elif sys.version_info[0] != 3: + start = os.path.join(TEST_ROOT, 'ptvsd') else: start = PROJECT_ROOT cmd += [ From a74925e313dc61121ddd374ae89f1d4d976072a2 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Thu, 11 Jan 2018 22:03:38 +0000 Subject: [PATCH 20/32] Add http.error_handler(). --- tests/helpers/http.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/tests/helpers/http.py b/tests/helpers/http.py index e7ded9f8..435bf414 100644 --- a/tests/helpers/http.py +++ b/tests/helpers/http.py @@ -59,3 +59,16 @@ def json_file_handler(data): pass return HTTPHandler + + +def error_handler(code, msg): + """Return an HTTP handler that always returns the given error code.""" + + class HTTPHandler(http.server.BaseHTTPRequestHandler): + def do_GET(self): + self.send_error(code, msg) + + def log_message(self, *args, **kwargs): + pass + + return HTTPHandler From 73b914c2f6c1d347a3e38ed7ad65497acf331553 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Thu, 11 Jan 2018 22:04:17 +0000 Subject: [PATCH 21/32] Add "system" tests for the schema script. 
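
Alongside the new tests, the check command gains an --upstream option so the
comparison target can be overridden on the command line, and handle_check() now
exits with a readable message instead of a traceback when a check fails. A minimal
sketch of that failure path; the always-failing opener below is a stand-in used
only to trigger it:

    from debugger_protocol.schema.__main__ import handle_check

    def missing(*args):
        raise FileNotFoundError

    try:
        handle_check('schema.json', _open=missing, _open_url=missing)
    except SystemExit as exc:
        print(exc)
    # Output:
    #   checking local schema file...
    #   ERROR: metadata file for 'schema.json' not found
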
--- debugger_protocol/schema/__main__.py | 18 +- debugger_protocol/schema/vendored.py | 6 +- tests/system_tests/test_schema.py | 262 +++++++++++++++++++++++++++ 3 files changed, 279 insertions(+), 7 deletions(-) diff --git a/debugger_protocol/schema/__main__.py b/debugger_protocol/schema/__main__.py index 74908227..40c675a0 100644 --- a/debugger_protocol/schema/__main__.py +++ b/debugger_protocol/schema/__main__.py @@ -39,13 +39,20 @@ def handle_download(source=UPSTREAM, target=VENDORED, *, @as_command('check') -def handle_check(schemafile=VENDORED, *, _open=open, _open_url=open_url): +def handle_check(schemafile=VENDORED, upstream=None, *, + _open=open, _open_url=open_url): print('checking local schema file...') - check_local(schemafile, - _open=_open) + try: + check_local(schemafile, + _open=_open) + except Exception as exc: + sys.exit('ERROR: {}'.format(exc)) print('comparing with upstream schema file...') - check_upstream(schemafile, - _open=_open, _open_url=_open_url) + try: + check_upstream(schemafile, url=upstream, + _open=_open, _open_url=_open_url) + except Exception as exc: + sys.exit('ERROR: {}'.format(exc)) print('schema file okay') @@ -77,6 +84,7 @@ def parse_args(argv=sys.argv[1:], prog=None): check = subs.add_parser('check') check.add_argument('--schemafile', default=VENDORED) + check.add_argument('--upstream', default=None) args = parser.parse_args(argv) if args.command is None: diff --git a/debugger_protocol/schema/vendored.py b/debugger_protocol/schema/vendored.py index 26b54690..27c4cb27 100644 --- a/debugger_protocol/schema/vendored.py +++ b/debugger_protocol/schema/vendored.py @@ -54,11 +54,13 @@ def check_local(filename, *, _open=open): raise SchemaFileMismatchError(filename, actual, meta) -def check_upstream(filename, *, _open=open, _open_url=open_url): +def check_upstream(filename, url=None, *, _open=open, _open_url=open_url): """Ensure that the local metadata file matches the upstream schema file.""" # Get the vendored and upstream metadata. meta, _ = read_metadata(filename, _open=_open) - _, upmeta = upstream.read(meta.upstream, _open_url=_open_url) + if url is None: + url = meta.upstream + _, upmeta = upstream.read(url, _open_url=_open_url) # Make sure the revision and checksum match. 
if meta.revision != upmeta.revision: diff --git a/tests/system_tests/test_schema.py b/tests/system_tests/test_schema.py index f020e86e..5019ae33 100644 --- a/tests/system_tests/test_schema.py +++ b/tests/system_tests/test_schema.py @@ -1,7 +1,13 @@ import contextlib import io +import os.path +import subprocess +import sys +import tempfile +from textwrap import dedent import unittest +from tests.helpers import http from debugger_protocol.schema.__main__ import handle_check @@ -17,3 +23,259 @@ class VendoredSchemaTests(unittest.TestCase): self.fail(str(exc)) result = stdout.getvalue().strip().splitlines()[-1] self.assertEqual(result, 'schema file okay') + + +class DownloadCommandTests(unittest.TestCase): + + CMD = '{python} -m debugger_protocol.schema download' + + def setUp(self): + super().setUp() + self._tempdir = tempfile.TemporaryDirectory(prefix='ptvsd-test-') + self.dirname = self._tempdir.name + self.schemafile = os.path.join(self.dirname, 'schema.json') + self.metadata = os.path.join(self.dirname, 'UPSTREAM') + + self.cmd = self.CMD.format(python=sys.executable) + self.args = self.cmd.split() + [ + '--target', self.schemafile, + ] + + def tearDown(self): + self._tempdir.cleanup() + super().tearDown() + + def test_default_source(self): + res = subprocess.run(self.args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + + self.assertEqual(res.returncode, 0) + self.assertEqual(res.stdout.decode(), dedent("""\ + downloading the schema file from https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json... + ...schema file written to {}. + saving the schema metadata... + ...metadata written to {}. + """).format(self.schemafile, self.metadata)) # noqa + self.assertEqual(res.stderr, b'') + + def test_custom_source(self): + handler = http.json_file_handler(b'') + with http.Server(handler) as srv: + upstream = 'http://{}/schema.json'.format(srv.address) + res = subprocess.run(self.args + ['--source', upstream], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout = res.stdout.decode() if res.stdout else '' + stderr = res.stderr.decode() if res.stderr else '' + + # Check the command result. + self.assertEqual(res.returncode, 0) + self.assertEqual(stdout, dedent("""\ + downloading the schema file from http://localhost:8000/schema.json... + ...schema file written to {}. + saving the schema metadata... + ...metadata written to {}. + """).format(self.schemafile, self.metadata)) # noqa + self.assertEqual(stderr, '') + + # Check the downloaded files. 
+ with open(self.schemafile) as schemafile: + data = schemafile.read() + with open(self.metadata) as metafile: + metadata = metafile.read() + orig = metadata + metadata = '\n'.join(line + for line in metadata.split('\n') + if not line.startswith('downloaded: ')) + self.assertEqual(data, "") + self.assertEqual(metadata, dedent("""\ + upstream: http://localhost:8000/schema.json + revision: + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + """)) + self.assertNotEqual(metadata, orig) + + +class CheckCommandTests(unittest.TestCase): + + CMD = '{python} -m debugger_protocol.schema check' + + def setUp(self): + super().setUp() + self.tempdir = None + self.cmd = self.CMD.format(python=sys.executable) + self.args = self.cmd.split() + + def tearDown(self): + if self.tempdir is not None: + self.tempdir.cleanup() + super().tearDown() + + def resolve_filename(self, name): + if self.tempdir is None: + self.tempdir = tempfile.TemporaryDirectory(prefix='ptvsd-test-') + return os.path.join(self.tempdir.name, name) + + def add_file(self, name, content): + filename = self.resolve_filename(name) + with open(filename, 'w') as outfile: + outfile.write(content) + return filename + + def test_match(self): + schemafile = self.add_file('schema.json', '') + self.add_file('UPSTREAM', dedent("""\ + upstream: https://x.y.z/a/b/c/debugProtocol.json + revision: + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + downloaded: 2018-01-09 13:10:59 (UTC) + """)) + handler = http.json_file_handler(b'') + with http.Server(handler) as srv: + upstream = 'http://{}/schema.json'.format(srv.address) + args = self.args + [ + '--schemafile', schemafile, + '--upstream', upstream, + ] + res = subprocess.run(args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout = res.stdout.decode() if res.stdout else '' + stderr = res.stderr.decode() if res.stderr else '' + + # Check the command result. + self.assertEqual(res.returncode, 0) + self.assertEqual(stdout, dedent("""\ + checking local schema file... + comparing with upstream schema file... + schema file okay + """)) + self.assertEqual(stderr, '') + + def test_schema_missing(self): + schemafile = self.resolve_filename('schema.json') + self.add_file('UPSTREAM', dedent("""\ + upstream: https://x.y.z/a/b/c/debugProtocol.json + revision: + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + downloaded: 2018-01-09 13:10:59 (UTC) + """)) + args = self.args + [ + '--schemafile', schemafile, + '--upstream', '', + ] + res = subprocess.run(args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout = res.stdout.decode() if res.stdout else '' + stderr = res.stderr.decode() if res.stderr else '' + + # Check the command result. + self.assertEqual(res.returncode, 1) + self.assertEqual(stdout, dedent("""\ + checking local schema file... + """)) + self.assertRegex(stderr.strip(), r"ERROR: schema file '[^']*schema.json' not found") # noqa + + def test_metadata_missing(self): + schemafile = self.add_file('schema.json', '') + args = self.args + [ + '--schemafile', schemafile, + '--upstream', '', + ] + res = subprocess.run(args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout = res.stdout.decode() if res.stdout else '' + stderr = res.stderr.decode() if res.stderr else '' + + # Check the command result. + self.assertEqual(res.returncode, 1) + self.assertEqual(stdout, dedent("""\ + checking local schema file... 
+ """)) + self.assertRegex(stderr.strip(), r"ERROR: metadata file for '[^']*schema.json' not found") # noqa + + def test_metadata_mismatch(self): + schemafile = self.add_file('schema.json', '') + self.add_file('UPSTREAM', dedent("""\ + upstream: https://x.y.z/a/b/c/debugProtocol.json + revision: + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + downloaded: 2018-01-09 13:10:59 (UTC) + """)) + args = self.args + [ + '--schemafile', schemafile, + '--upstream', '', + ] + res = subprocess.run(args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout = res.stdout.decode() if res.stdout else '' + stderr = res.stderr.decode() if res.stderr else '' + + # Check the command result. + self.assertEqual(res.returncode, 1) + self.assertEqual(stdout, dedent("""\ + checking local schema file... + """)) + self.assertRegex(stderr.strip(), r"ERROR: schema file '[^']*schema.json' does not match metadata file .*") # noqa + + def test_upstream_not_found(self): + schemafile = self.add_file('schema.json', '') + self.add_file('UPSTREAM', dedent("""\ + upstream: https://x.y.z/a/b/c/debugProtocol.json + revision: + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + downloaded: 2018-01-09 13:10:59 (UTC) + """)) + handler = http.error_handler(404, 'schema not found') + with http.Server(handler) as srv: + upstream = 'http://{}/schema.json'.format(srv.address) + args = self.args + [ + '--schemafile', schemafile, + '--upstream', upstream, + ] + res = subprocess.run(args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout = res.stdout.decode() if res.stdout else '' + stderr = res.stderr.decode() if res.stderr else '' + + # Check the command result. + self.assertEqual(res.returncode, 1) + self.assertEqual(stdout, dedent("""\ + checking local schema file... + comparing with upstream schema file... + """)) + self.assertEqual(stderr.strip(), "ERROR: schema file at 'http://localhost:8000/schema.json' not found") # noqa + + def test_upstream_mismatch(self): + schemafile = self.add_file('schema.json', '') + self.add_file('UPSTREAM', dedent("""\ + upstream: https://x.y.z/a/b/c/debugProtocol.json + revision: + checksum: e778c3751f9d0bceaf8d5aa81e2c659f + downloaded: 2018-01-09 13:10:59 (UTC) + """)) + handler = http.json_file_handler(b'') + with http.Server(handler) as srv: + upstream = 'http://{}/schema.json'.format(srv.address) + args = self.args + [ + '--schemafile', schemafile, + '--upstream', upstream, + ] + res = subprocess.run(args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout = res.stdout.decode() if res.stdout else '' + stderr = res.stderr.decode() if res.stderr else '' + + # Check the command result. + self.assertEqual(res.returncode, 1) + self.assertEqual(stdout, dedent("""\ + checking local schema file... + comparing with upstream schema file... + """)) + self.assertRegex(stderr.strip(), r"ERROR: local schema file '[^']*schema.json' does not match upstream .*") # noqa From caba7f0566dd96e11f8a0bbc700aaee455fae5bd Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Mon, 22 Jan 2018 16:03:21 +0000 Subject: [PATCH 22/32] Do not in-line complex code unnecessarily. --- debugger_protocol/schema/__main__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/debugger_protocol/schema/__main__.py b/debugger_protocol/schema/__main__.py index 40c675a0..e0b3b111 100644 --- a/debugger_protocol/schema/__main__.py +++ b/debugger_protocol/schema/__main__.py @@ -30,11 +30,11 @@ def handle_download(source=UPSTREAM, target=VENDORED, *, # Save the metadata. 
print('saving the schema metadata...') + formatted = meta.format() metafile, filename = open_metadata(target, 'w', _open=_open) with metafile: - metafile.write( - meta.format()) + metafile.write(formatted) print('...metadata written to {}.'.format(filename)) From c5b632659210a1191760cc411ad49498d72962b8 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Mon, 22 Jan 2018 16:10:40 +0000 Subject: [PATCH 23/32] Build the GitHub URL more clearly. --- debugger_protocol/schema/_util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/debugger_protocol/schema/_util.py b/debugger_protocol/schema/_util.py index 79d08a77..0b43c736 100644 --- a/debugger_protocol/schema/_util.py +++ b/debugger_protocol/schema/_util.py @@ -57,4 +57,4 @@ def github_url_replace_ref(url, newref): raise ValueError('invalid GitHub resource URL: {!r}'.format(url)) org, repo, kind, _, path = m.groups() parts = (org, repo, kind, newref, path) - return 'https://github.com/' + '/'.join(parts) + return 'https://github.com/{}/{}/{}/{}/{}'.format(*parts) From f53d8e45bb8633e12ec6313814be97899841514a Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Mon, 22 Jan 2018 16:23:33 +0000 Subject: [PATCH 24/32] Add a note about a circular import. --- debugger_protocol/schema/metadata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/debugger_protocol/schema/metadata.py b/debugger_protocol/schema/metadata.py index c27a674c..8d23edfd 100644 --- a/debugger_protocol/schema/metadata.py +++ b/debugger_protocol/schema/metadata.py @@ -15,7 +15,7 @@ def open_metadata(schemafile, mode='r', *, _open=open): Also return the metadata file's filename. """ - from .vendored import METADATA + from .vendored import METADATA # Here due to a circular import. filename = os.path.join(os.path.dirname(schemafile), os.path.basename(METADATA)) try: From 689a93cdd81c5ef244ff3f8d6abb96baa3c6297c Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Mon, 22 Jan 2018 16:57:00 +0000 Subject: [PATCH 25/32] if -> elif --- tests/__main__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/__main__.py b/tests/__main__.py index 99faffbe..2e1a0321 100644 --- a/tests/__main__.py +++ b/tests/__main__.py @@ -17,9 +17,10 @@ def convert_argv(argv): if arg == '--quick': quick = True continue - if arg == '--full': + elif arg == '--full': quick = False continue + # Unittest's main has only flags and positional args. # So we don't worry about options with values. if not arg.startswith('-'): From 14a333b9909be5cda38ceca8dd26f2eb2012e77f Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Mon, 22 Jan 2018 17:00:50 +0000 Subject: [PATCH 26/32] Clarify about unittest.main(). --- tests/__main__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/__main__.py b/tests/__main__.py index 2e1a0321..93baf17e 100644 --- a/tests/__main__.py +++ b/tests/__main__.py @@ -39,7 +39,9 @@ def convert_argv(argv): help = True args.append(arg) - cmd = [sys.executable + ' -m unittest'] # ...how unittest.main() likes it. + # We make the "executable" a single arg because unittest.main() + # doesn't work if we split it into 3 parts. + cmd = [sys.executable + ' -m unittest'] if not modules and not help: # Do discovery. if quick: From f1daf0ae5d5dd79969007e9459dfbb516f8f9102 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Mon, 22 Jan 2018 17:05:32 +0000 Subject: [PATCH 27/32] Add a comment to clarify why some tests are Py3-only. 
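
The code under debugger_protocol/ is tooling that only needs Python 3,
so its tests skip themselves at import time under Python 2, and the
test runner narrows discovery accordingly. Roughly what convert_argv()
now produces (return values taken from the runner tests earlier in this
series; '<python>' stands for sys.executable, and PROJECT_ROOT and
TEST_ROOT are the constants defined in tests/__main__.py):

    from tests.__main__ import convert_argv

    convert_argv([])
    # Python 3, full run ->
    #   ['<python> -m unittest', 'discover',
    #    '--top-level-directory', PROJECT_ROOT,
    #    '--start-directory', PROJECT_ROOT]

    convert_argv(['--quick'])
    # --quick (and any run under Python 2) discovers only tests/ptvsd ->
    #   ['<python> -m unittest', 'discover',
    #    '--top-level-directory', PROJECT_ROOT,
    #    '--start-directory', os.path.join(TEST_ROOT, 'ptvsd')]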
--- tests/debugger_protocol/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/debugger_protocol/__init__.py b/tests/debugger_protocol/__init__.py index bd305792..2efc3d52 100644 --- a/tests/debugger_protocol/__init__.py +++ b/tests/debugger_protocol/__init__.py @@ -2,5 +2,8 @@ import sys import unittest +# The code under the debugger_protocol package isn't used +# by the debugger (it's used by schema-related tools). So we don't need +# to support Python 2. if sys.version_info[0] == 2: raise unittest.SkipTest('not tested under Python 2') From 506782d500ba34ede22a1e143b765c31ff8c561d Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Mon, 22 Jan 2018 17:11:14 +0000 Subject: [PATCH 28/32] Do not escape single quotes. --- tests/debugger_protocol/schema/test_vendored.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/debugger_protocol/schema/test_vendored.py b/tests/debugger_protocol/schema/test_vendored.py index 839dc902..9c4f79c0 100644 --- a/tests/debugger_protocol/schema/test_vendored.py +++ b/tests/debugger_protocol/schema/test_vendored.py @@ -37,9 +37,9 @@ class CheckLocalTests(unittest.TestCase): with self.assertRaises(SchemaFileMismatchError) as cm: check_local('schema.json', _open=opener.open) self.assertEqual(str(cm.exception), - ('schema file \'schema.json\' does not match ' + ("schema file 'schema.json' does not match " 'metadata file (checksum mismatch: ' - '\'e778c3751f9d0bceaf8d5aa81e2c659f\' != \'abc2\')')) + "'e778c3751f9d0bceaf8d5aa81e2c659f' != 'abc2')")) def test_metafile_missing(self): metafile = None @@ -106,9 +106,9 @@ class CheckUpstream(unittest.TestCase): check_upstream('schema.json', _open=opener.open, _open_url=opener.open) self.assertEqual(str(cm.exception), - ('local schema file \'schema.json\' does not match ' - 'upstream \'https://github.com/x/y/raw/master/z\' ' - '(revision mismatch: \'abc2\' != \'fc2395ca3564fb2afded8d90ddbe38dad1bf86f1\')')) # noqa + ("local schema file 'schema.json' does not match " + "upstream 'https://github.com/x/y/raw/master/z' " + "(revision mismatch: 'abc2' != 'fc2395ca3564fb2afded8d90ddbe38dad1bf86f1')")) # noqa def test_checksum_mismatch(self): metafile = io.StringIO(dedent(""" @@ -126,9 +126,9 @@ class CheckUpstream(unittest.TestCase): check_upstream('schema.json', _open=opener.open, _open_url=opener.open) self.assertEqual(str(cm.exception), - ('local schema file \'schema.json\' does not match ' - 'upstream \'https://github.com/x/y/raw/master/z\' ' - '(checksum mismatch: \'abc2\' != \'e778c3751f9d0bceaf8d5aa81e2c659f\')')) # noqa + ("local schema file 'schema.json' does not match " + "upstream 'https://github.com/x/y/raw/master/z' " + "(checksum mismatch: 'abc2' != 'e778c3751f9d0bceaf8d5aa81e2c659f')")) # noqa def test_metafile_missing(self): ... From ffcf7d99d87bd8930c9d383518b7a027052e9357 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Mon, 22 Jan 2018 17:29:59 +0000 Subject: [PATCH 29/32] Add missing tests. 
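
The previously elided tests need the stub opener to distinguish a
missing local file from a missing URL, so a queued None now raises
urllib.error.HTTPError for http(s) paths and FileNotFoundError
otherwise. Roughly how the stub behaves after this change (direct calls
shown for illustration; in the tests it is passed in as _open and
_open_url, and the metadata text here is a placeholder):

    import io

    from tests.debugger_protocol.schema.helpers import StubOpener

    opener = StubOpener(io.StringIO('upstream: ...'), None)
    opener.open('UPSTREAM')                 # first queued object: the metadata
    opener.open('https://x.y/schema.json')
    # second queued object is None and the path looks like a URL,
    # so this raises urllib.error.HTTPError (404)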
--- tests/debugger_protocol/schema/helpers.py | 7 ++++++- .../debugger_protocol/schema/test_vendored.py | 21 +++++++++++++++++-- 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/tests/debugger_protocol/schema/helpers.py b/tests/debugger_protocol/schema/helpers.py index 0144c948..f0f2bc46 100644 --- a/tests/debugger_protocol/schema/helpers.py +++ b/tests/debugger_protocol/schema/helpers.py @@ -1,3 +1,4 @@ +import urllib.error class StubOpener: @@ -11,5 +12,9 @@ class StubOpener: file = self.files.pop(0) if file is None: - raise FileNotFoundError + if args[0].startswith('http'): + raise urllib.error.HTTPError(args[0], 404, 'Not Found', + None, None) + else: + raise FileNotFoundError return file diff --git a/tests/debugger_protocol/schema/test_vendored.py b/tests/debugger_protocol/schema/test_vendored.py index 9c4f79c0..ddb0d604 100644 --- a/tests/debugger_protocol/schema/test_vendored.py +++ b/tests/debugger_protocol/schema/test_vendored.py @@ -131,7 +131,24 @@ class CheckUpstream(unittest.TestCase): "(checksum mismatch: 'abc2' != 'e778c3751f9d0bceaf8d5aa81e2c659f')")) # noqa def test_metafile_missing(self): - ... + metafile = None + opener = StubOpener(metafile) + + with self.assertRaises(MetadataError): + check_upstream('schema.json', + _open=opener.open, _open_url=opener.open) def test_url_resource_missing(self): - ... + metafile = io.StringIO(dedent(""" + upstream: https://github.com/x/y/raw/master/z + revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 + checksum: abc2 + downloaded: 2018-01-09 13:10:59 (UTC) + """)) + #schemafile = io.BytesIO(b'') + schemafile = None + opener = StubOpener(metafile, schemafile) + + with self.assertRaises(SchemaFileError): + check_upstream('schema.json', + _open=opener.open, _open_url=opener.open) From 1e54875b7d1c9d105c2e0a6376de2a82415ffae0 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Mon, 22 Jan 2018 17:39:52 +0000 Subject: [PATCH 30/32] Drop an unnecessary check. --- tests/system_tests/test_schema.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/system_tests/test_schema.py b/tests/system_tests/test_schema.py index 5019ae33..33b5b6a7 100644 --- a/tests/system_tests/test_schema.py +++ b/tests/system_tests/test_schema.py @@ -21,8 +21,6 @@ class VendoredSchemaTests(unittest.TestCase): handle_check() except Exception as exc: self.fail(str(exc)) - result = stdout.getvalue().strip().splitlines()[-1] - self.assertEqual(result, 'schema file okay') class DownloadCommandTests(unittest.TestCase): From b646cc50e70c2ae85ab0a40972bf4a96e55ac746 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Mon, 22 Jan 2018 17:42:39 +0000 Subject: [PATCH 31/32] Add a docstring to clarify the purpose of some tests. --- tests/system_tests/test_schema.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/system_tests/test_schema.py b/tests/system_tests/test_schema.py index 33b5b6a7..f7237d0e 100644 --- a/tests/system_tests/test_schema.py +++ b/tests/system_tests/test_schema.py @@ -12,6 +12,7 @@ from debugger_protocol.schema.__main__ import handle_check class VendoredSchemaTests(unittest.TestCase): + """Tests to make sure our vendored schema is up-to-date.""" def test_matches_upstream(self): stdout = io.StringIO() From b9e88548a6388b65633440ee18345e2a9bebb818 Mon Sep 17 00:00:00 2001 From: Eric Snow Date: Mon, 22 Jan 2018 18:24:34 +0000 Subject: [PATCH 32/32] Fix the vendored revision hash. 
--- debugger_protocol/schema/UPSTREAM | 2 +- debugger_protocol/schema/_util.py | 8 ++++---- tests/debugger_protocol/schema/test___main__.py | 4 ++-- tests/debugger_protocol/schema/test_upstream.py | 4 ++-- tests/debugger_protocol/schema/test_util.py | 2 +- tests/debugger_protocol/schema/test_vendored.py | 6 +++--- 6 files changed, 13 insertions(+), 13 deletions(-) diff --git a/debugger_protocol/schema/UPSTREAM b/debugger_protocol/schema/UPSTREAM index 3bbccb3d..a46fc369 100644 --- a/debugger_protocol/schema/UPSTREAM +++ b/debugger_protocol/schema/UPSTREAM @@ -1,4 +1,4 @@ upstream: https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json -revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1 +revision: 393919e9ae7e469c40bcd3625bd3c72f5412fb2a checksum: 24a370d038f7875f4db2631d5238fd17 downloaded: 2018-01-10 00:47:10 (UTC) diff --git a/debugger_protocol/schema/_util.py b/debugger_protocol/schema/_util.py index 0b43c736..84feba8e 100644 --- a/debugger_protocol/schema/_util.py +++ b/debugger_protocol/schema/_util.py @@ -40,14 +40,14 @@ def github_get_revision(url, *, _open_url=open_url): m = GH_RESOURCE_RE.match(url) if not m: raise ValueError('invalid GitHub resource URL: {!r}'.format(url)) - org, repo, _, ref, _ = m.groups() + org, repo, _, ref, path = m.groups() - revurl = ('https://api.github.com/repos/{}/{}/commits/{}' - ).format(org, repo, ref) + revurl = ('https://api.github.com/repos/{}/{}/commits?sha={}&path={}' + ).format(org, repo, ref, path) with _open_url(revurl) as revinfo: raw = revinfo.read() data = json.loads(raw.decode()) - return data['sha'] + return data[0]['sha'] def github_url_replace_ref(url, newref): diff --git a/tests/debugger_protocol/schema/test___main__.py b/tests/debugger_protocol/schema/test___main__.py index a044403e..7ff5bb4c 100644 --- a/tests/debugger_protocol/schema/test___main__.py +++ b/tests/debugger_protocol/schema/test___main__.py @@ -40,7 +40,7 @@ class HandleDownloadTests(unittest.TestCase): schemafile = io.BytesIO(b'') outfile = Outfile(b'') buf = io.BytesIO( - b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}') + b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]') metafile = Outfile('') opener = StubOpener(schemafile, outfile, buf, metafile) @@ -82,7 +82,7 @@ class HandleCheckTests(unittest.TestCase): io.StringIO(metadata), io.BytesIO(b''), # upstream io.BytesIO( - b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}'), + b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]'), ) stdout = io.StringIO() diff --git a/tests/debugger_protocol/schema/test_upstream.py b/tests/debugger_protocol/schema/test_upstream.py index 75c6503a..a251f3d7 100644 --- a/tests/debugger_protocol/schema/test_upstream.py +++ b/tests/debugger_protocol/schema/test_upstream.py @@ -16,7 +16,7 @@ class DownloadTests(unittest.TestCase): infile = io.BytesIO(b'') outfile = io.BytesIO() buf = io.BytesIO( - b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}') + b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]') meta = download('https://github.com/x/y/raw/master/z', infile, outfile, @@ -39,7 +39,7 @@ class ReadSchemaTests(unittest.TestCase): def test_success(self): schemafile = io.BytesIO(b'') buf = io.BytesIO( - b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}') + b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]') opener = StubOpener(schemafile, buf) data, meta = read('https://github.com/x/y/raw/master/z', _open_url=opener.open) diff --git a/tests/debugger_protocol/schema/test_util.py 
b/tests/debugger_protocol/schema/test_util.py index f636a36c..a4d1658c 100644 --- a/tests/debugger_protocol/schema/test_util.py +++ b/tests/debugger_protocol/schema/test_util.py @@ -8,7 +8,7 @@ class GetRevisionTests(unittest.TestCase): def test_github(self): buf = io.BytesIO( - b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}') + b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]') revision = get_revision('https://github.com/x/y/raw/master/z', _open_url=lambda _: buf) diff --git a/tests/debugger_protocol/schema/test_vendored.py b/tests/debugger_protocol/schema/test_vendored.py index ddb0d604..b47eadf0 100644 --- a/tests/debugger_protocol/schema/test_vendored.py +++ b/tests/debugger_protocol/schema/test_vendored.py @@ -83,7 +83,7 @@ class CheckUpstream(unittest.TestCase): """)) schemafile = io.BytesIO(b'') buf = io.BytesIO( - b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}') + b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]') opener = StubOpener(metafile, schemafile, buf) # This does not fail. @@ -99,7 +99,7 @@ class CheckUpstream(unittest.TestCase): """)) schemafile = io.BytesIO(b'') buf = io.BytesIO( - b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}') + b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]') opener = StubOpener(metafile, schemafile, buf) with self.assertRaises(SchemaFileMismatchError) as cm: @@ -119,7 +119,7 @@ class CheckUpstream(unittest.TestCase): """)) schemafile = io.BytesIO(b'') buf = io.BytesIO( - b'{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}') + b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]') opener = StubOpener(metafile, schemafile, buf) with self.assertRaises(SchemaFileMismatchError) as cm: