Merge pull request #6 from ericsnowcurrently/upstream-schema-test

Add tooling around the VSC debugger protocol.
Eric Snow 2018-01-22 11:34:35 -07:00 committed by GitHub
commit 56b4a8e68c
27 changed files with 4252 additions and 20 deletions


@ -14,9 +14,17 @@ lint:
$(PYTHON) -m flake8 --ignore E24,E121,E123,E125,E126,E221,E226,E266,E704,E265 $(CURDIR)
.PHONY: test
test: ## Run the test suite.
$(PYTHON) -m tests
test: ## Run the test suite.
$(PYTHON) -m tests --full
.PHONY: test-quick
test-quick:
$(PYTHON) -m tests --quick
.PHONY: coverage
coverage: ## Check line coverage.
$(PYTHON) -m coverage run -m tests
.PHONY: check-schemafile
check-schemafile: ## Validate the vendored schema file.
python3 -m debugger_protocol.schema check



@ -0,0 +1,4 @@
upstream: https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json
revision: 393919e9ae7e469c40bcd3625bd3c72f5412fb2a
checksum: 24a370d038f7875f4db2631d5238fd17
downloaded: 2018-01-10 00:47:10 (UTC)
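For illustration, a minimal sketch of how this UPSTREAM file is consumed, assuming the debugger_protocol.schema package added below is importable (Metadata.parse and METADATA come from the new metadata.py and vendored.py modules):

# Parse the vendored UPSTREAM file into a Metadata namedtuple; format()
# re-serializes the same four fields.
from debugger_protocol.schema.metadata import Metadata
from debugger_protocol.schema.vendored import METADATA

with open(METADATA) as metafile:
    meta = Metadata.parse(metafile.read())
print(meta.revision)    # 393919e9ae7e469c40bcd3625bd3c72f5412fb2a
print(meta.format())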


@ -0,0 +1,4 @@
import os.path
DATA_DIR = os.path.dirname(__file__)


@ -0,0 +1,103 @@
import argparse
import sys

from ._util import open_url
from .metadata import open_metadata
from .upstream import URL as UPSTREAM, download
from .vendored import FILENAME as VENDORED, check_local, check_upstream


COMMANDS = {}


def as_command(name):
    def decorator(f):
        COMMANDS[name] = f
        return f
    return decorator


@as_command('download')
def handle_download(source=UPSTREAM, target=VENDORED, *,
                    _open=open, _open_url=open_url):
    # Download the schema file.
    print('downloading the schema file from {}...'.format(source))
    with _open_url(source) as infile:
        with _open(target, 'wb') as outfile:
            meta = download(source, infile, outfile,
                            _open_url=_open_url)
    print('...schema file written to {}.'.format(target))

    # Save the metadata.
    print('saving the schema metadata...')
    formatted = meta.format()
    metafile, filename = open_metadata(target, 'w',
                                       _open=_open)
    with metafile:
        metafile.write(formatted)
    print('...metadata written to {}.'.format(filename))


@as_command('check')
def handle_check(schemafile=VENDORED, upstream=None, *,
                 _open=open, _open_url=open_url):
    print('checking local schema file...')
    try:
        check_local(schemafile,
                    _open=_open)
    except Exception as exc:
        sys.exit('ERROR: {}'.format(exc))

    print('comparing with upstream schema file...')
    try:
        check_upstream(schemafile, url=upstream,
                       _open=_open, _open_url=_open_url)
    except Exception as exc:
        sys.exit('ERROR: {}'.format(exc))

    print('schema file okay')


#############################
# the script

def parse_args(argv=sys.argv[1:], prog=None):
    if prog is None:
        if __name__ == '__main__':
            module = __spec__.name
            pkg, _, mod = module.rpartition('.')
            if not pkg:
                module = mod
            elif mod == '__main__':
                module = pkg
            prog = 'python3 -m {}'.format(module)
        else:
            prog = sys.argv[0]

    parser = argparse.ArgumentParser(
        prog=prog,
        description='Manage the vendored VSC debugger protocol schema.',
    )
    subs = parser.add_subparsers(dest='command')

    download = subs.add_parser('download')
    download.add_argument('--source', default=UPSTREAM)
    download.add_argument('--target', default=VENDORED)

    check = subs.add_parser('check')
    check.add_argument('--schemafile', default=VENDORED)
    check.add_argument('--upstream', default=None)

    args = parser.parse_args(argv)
    if args.command is None:
        parser.print_help()
        parser.exit()
    return args


def main(command, **kwargs):
    handle_command = COMMANDS[command]
    return handle_command(**kwargs)


if __name__ == '__main__':
    args = parse_args()
    main(**(vars(args)))
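For illustration, a minimal sketch of driving the same argparse-to-COMMANDS dispatch programmatically instead of via `python3 -m debugger_protocol.schema`; the --target path is only an example and both calls hit the network:

from debugger_protocol.schema.__main__ import parse_args, main

args = parse_args(['check'])   # defaults to the vendored schema file
main(**vars(args))             # dispatches to handle_check()

args = parse_args(['download', '--target', '/tmp/debugProtocol.json'])
main(**vars(args))             # dispatches to handle_download()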


@ -0,0 +1,60 @@
import hashlib
import json
import re
import urllib.request


def open_url(url):
    """Return a file-like object for (binary) reading the given URL."""
    return urllib.request.urlopen(url)


def get_revision(url, *, _open_url=open_url):
    """Return the revision corresponding to the given URL."""
    if url.startswith('https://github.com/'):
        return github_get_revision(url, _open_url=_open_url)
    else:
        return '<unknown>'


def get_checksum(data):
    """Return the MD5 hash for the given data."""
    m = hashlib.md5()
    m.update(data)
    return m.hexdigest()


##################################
# github

GH_RESOURCE_RE = re.compile(r'^https://github.com'
                            r'/(?P<org>[^/]*)'
                            r'/(?P<repo>[^/]*)'
                            r'/(?P<kind>[^/]*)'
                            r'/(?P<rev>[^/]*)'
                            r'/(?P<path>.*)$')


def github_get_revision(url, *, _open_url=open_url):
    """Return the full commit hash corresponding to the given URL."""
    m = GH_RESOURCE_RE.match(url)
    if not m:
        raise ValueError('invalid GitHub resource URL: {!r}'.format(url))
    org, repo, _, ref, path = m.groups()
    revurl = ('https://api.github.com/repos/{}/{}/commits?sha={}&path={}'
              ).format(org, repo, ref, path)
    with _open_url(revurl) as revinfo:
        raw = revinfo.read()
    data = json.loads(raw.decode())
    return data[0]['sha']


def github_url_replace_ref(url, newref):
    """Return a new URL with the ref replaced."""
    m = GH_RESOURCE_RE.match(url)
    if not m:
        raise ValueError('invalid GitHub resource URL: {!r}'.format(url))
    org, repo, kind, _, path = m.groups()
    parts = (org, repo, kind, newref, path)
    return 'https://github.com/{}/{}/{}/{}/{}'.format(*parts)
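For illustration, a quick sketch of what GH_RESOURCE_RE captures and how github_url_replace_ref pins a URL to a revision, using the upstream URL recorded in the UPSTREAM file above:

from debugger_protocol.schema._util import GH_RESOURCE_RE, github_url_replace_ref

url = 'https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json'
m = GH_RESOURCE_RE.match(url)
print(m.group('org'), m.group('kind'), m.group('rev'), m.group('path'))
# Microsoft raw master debugProtocol.json

# Swap the floating "master" ref for a specific commit hash.
print(github_url_replace_ref(url, '393919e9ae7e469c40bcd3625bd3c72f5412fb2a'))
# https://github.com/Microsoft/vscode-debugadapter-node/raw/393919e9ae7e469c40bcd3625bd3c72f5412fb2a/debugProtocol.json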

File diff suppressed because it is too large.


@ -0,0 +1,15 @@
class SchemaFileError(Exception):
    """A schema-file-related operation failed."""


def read_schema(filename, *, _open=open):
    """Return the data (bytes) in the given schema file."""
    try:
        schemafile = _open(filename, 'rb')
    except FileNotFoundError as exc:
        raise SchemaFileError(
            'schema file {!r} not found'.format(filename))
    with schemafile:
        return schemafile.read()
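For illustration, a small sketch of read_schema() with an injected opener, mirroring how the tests below use StubOpener:

import io
from debugger_protocol.schema.file import read_schema

# _open is called as _open(filename, 'rb'), so any callable returning a
# binary file-like object will do.
data = read_schema('schema.json',
                   _open=lambda *args: io.BytesIO(b'<a schema>'))
assert data == b'<a schema>'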


@ -0,0 +1,118 @@
from collections import namedtuple
from datetime import datetime
import os.path
from textwrap import dedent

from ._util import github_url_replace_ref


class MetadataError(Exception):
    """A metadata-related operation failed."""


def open_metadata(schemafile, mode='r', *, _open=open):
    """Return a file object for the metadata of the given schema file.

    Also return the metadata file's filename.
    """
    from .vendored import METADATA  # Here due to a circular import.
    filename = os.path.join(os.path.dirname(schemafile),
                            os.path.basename(METADATA))
    try:
        return _open(filename, mode), filename
    except FileNotFoundError as exc:
        raise MetadataError(
            'metadata file for {!r} not found'.format(schemafile))


def read_metadata(schemafile, *, _open=open):
    """Return the metadata corresponding to the schema file.

    Also return the path to the metadata file.
    """
    metafile, filename = open_metadata(schemafile, _open=_open)
    with metafile:
        data = metafile.read()
    try:
        meta = Metadata.parse(data)
    except Exception as exc:
        raise MetadataError(
            'metadata file {!r} not valid: {}'.format(filename, exc))
    return meta, filename


class Metadata(
        namedtuple('Metadata', 'upstream revision checksum downloaded')):
    """Info about the local copy of the upstream schema file."""

    TIMESTAMP = '%Y-%m-%d %H:%M:%S (UTC)'
    FORMAT = dedent("""\
            upstream: {}
            revision: {}
            checksum: {}
            downloaded: {:%s}
            """) % TIMESTAMP

    @classmethod
    def parse(cls, data):
        """Return an instance based on the given metadata string."""
        lines = data.splitlines()
        kwargs = {}
        for line in lines:
            line = line.strip()
            if line.startswith('#'):
                continue
            if not line:
                continue
            field, _, value = line.partition(':')
            kwargs[field] = value.strip()
        self = cls(**kwargs)
        return self

    def __new__(cls, upstream, revision, checksum, downloaded):
        # coercion
        upstream = str(upstream) if upstream else None
        revision = str(revision) if revision else None
        checksum = str(checksum) if checksum else None
        if not downloaded:
            downloaded = None
        elif isinstance(downloaded, str):
            downloaded = datetime.strptime(downloaded, cls.TIMESTAMP)
        elif downloaded.tzinfo is not None:
            downloaded -= downloaded.utcoffset()
        self = super().__new__(cls, upstream, revision, checksum, downloaded)
        return self

    def __init__(self, *args, **kwargs):
        # validation
        if not self.upstream:
            raise ValueError('missing upstream URL')
        # TODO ensure upstream is URL?
        if not self.revision:
            raise ValueError('missing upstream revision')
        # TODO ensure revision is a hash?
        if not self.checksum:
            raise ValueError('missing checksum')
        # TODO ensure checksum is a MD5 hash?
        if not self.downloaded:
            raise ValueError('missing downloaded')

    @property
    def url(self):
        if self.upstream.startswith('https://github.com/'):
            return github_url_replace_ref(self.upstream, self.revision)
        else:
            raise NotImplementedError

    def format(self):
        """Return a string containing the formatted metadata."""
        return self.FORMAT.format(*self)
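For illustration, the parse/format round trip that the metadata tests below exercise, as a minimal sketch:

from datetime import datetime
from debugger_protocol.schema.metadata import Metadata

meta = Metadata('https://x.y.z/schema.json',
                'abcdef0123456789',
                'deadbeefdeadbeefdeadbeefdeadbeef',
                datetime(2018, 1, 9, 13, 10, 59))
text = meta.format()
assert Metadata.parse(text) == meta
print(text, end='')
# upstream: https://x.y.z/schema.json
# revision: abcdef0123456789
# checksum: deadbeefdeadbeefdeadbeefdeadbeef
# downloaded: 2018-01-09 13:10:59 (UTC)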


@ -0,0 +1,36 @@
from datetime import datetime
import io
import urllib.error

from ._util import open_url, get_revision, get_checksum
from .file import SchemaFileError
from .metadata import Metadata


URL = 'https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json'  # noqa


def download(source, infile, outfile, *,
             _now=datetime.utcnow, _open_url=open_url):
    """Return the corresponding metadata after downloading the schema file."""
    timestamp = _now()
    revision = get_revision(source, _open_url=_open_url)
    data = infile.read()
    checksum = get_checksum(data)
    outfile.write(data)
    return Metadata(source, revision, checksum, timestamp)


def read(url, *, _open_url=open_url):
    """Return (data, metadata) for the given upstream URL."""
    outfile = io.BytesIO()
    try:
        infile = _open_url(url)
    except (FileNotFoundError, urllib.error.HTTPError) as exc:
        # TODO: Ensure it's a 404 error?
        raise SchemaFileError('schema file at {!r} not found'.format(url))
    with infile:
        upstream = download(url, infile, outfile, _open_url=_open_url)
    return outfile.getvalue(), upstream
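For illustration, download() only needs file-like objects, so it can be dry-run entirely in memory; this mirrors DownloadTests further down, and the URL and canned GitHub API response are just examples:

import io
from debugger_protocol.schema.upstream import download

infile = io.BytesIO(b'<a schema>')
outfile = io.BytesIO()
gh_api = io.BytesIO(b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]')
meta = download('https://github.com/x/y/raw/master/z', infile, outfile,
                _open_url=lambda _: gh_api)
assert outfile.getvalue() == b'<a schema>'
assert meta.revision == 'fc2395ca3564fb2afded8d90ddbe38dad1bf86f1'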


@ -0,0 +1,69 @@
import os.path

from . import DATA_DIR, upstream
from ._util import open_url, get_checksum
from .file import SchemaFileError, read_schema
from .metadata import MetadataError, read_metadata


FILENAME = os.path.join(DATA_DIR, 'debugProtocol.json')
METADATA = os.path.join(DATA_DIR, 'UPSTREAM')


class SchemaFileMismatchError(SchemaFileError, MetadataError):
    """The schema file does not match expectations."""

    @classmethod
    def _build_message(cls, filename, actual, expected, upstream):
        if upstream:
            msg = ('local schema file {!r} does not match upstream {!r}'
                   ).format(filename, expected.upstream)
        else:
            msg = ('schema file {!r} does not match metadata file'
                   ).format(filename)
        for field in actual._fields:
            value = getattr(actual, field)
            other = getattr(expected, field)
            if value != other:
                msg += (' ({} mismatch: {!r} != {!r})'
                        ).format(field, value, other)
                break
        return msg

    def __init__(self, filename, actual, expected, *, upstream=False):
        super().__init__(
            self._build_message(filename, actual, expected, upstream))
        self.filename = filename
        self.actual = actual
        self.expected = expected
        self.upstream = upstream


def check_local(filename, *, _open=open):
    """Ensure that the local schema file matches the local metadata file."""
    # Get the vendored metadata and data.
    meta, _ = read_metadata(filename, _open=_open)
    data = read_schema(filename, _open=_open)

    # Only worry about the checksum matching.
    actual = meta._replace(
        checksum=get_checksum(data))
    if actual != meta:
        raise SchemaFileMismatchError(filename, actual, meta)


def check_upstream(filename, url=None, *, _open=open, _open_url=open_url):
    """Ensure that the local metadata file matches the upstream schema file."""
    # Get the vendored and upstream metadata.
    meta, _ = read_metadata(filename, _open=_open)
    if url is None:
        url = meta.upstream
    _, upmeta = upstream.read(url, _open_url=_open_url)

    # Make sure the revision and checksum match.
    if meta.revision != upmeta.revision:
        raise SchemaFileMismatchError(filename, meta, upmeta, upstream=True)
    if meta.checksum != upmeta.checksum:
        raise SchemaFileMismatchError(filename, meta, upmeta, upstream=True)
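For illustration, what the new "make check-schemafile" target boils down to; a sketch only, and the upstream check hits GitHub:

from debugger_protocol.schema.vendored import FILENAME, check_local, check_upstream

check_local(FILENAME)     # vendored file matches the checksum in UPSTREAM
check_upstream(FILENAME)  # UPSTREAM matches the current upstream revision/checksum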


@ -9,9 +9,18 @@ PROJECT_ROOT = os.path.dirname(TEST_ROOT)
def convert_argv(argv):
help = False
quick = False
args = []
modules = set()
for arg in argv:
if arg == '--quick':
quick = True
continue
elif arg == '--full':
quick = False
continue
# Unittest's main has only flags and positional args.
# So we don't worry about options with values.
if not arg.startswith('-'):
@ -26,15 +35,27 @@ def convert_argv(argv):
mod = mod.replace(os.sep, '.')
arg = mod if not test else mod + '.' + test
modules.add(mod)
elif arg in ('-h', '--help'):
help = True
args.append(arg)
if not modules:
# We make the "executable" a single arg because unittest.main()
# doesn't work if we split it into 3 parts.
cmd = [sys.executable + ' -m unittest']
if not modules and not help:
# Do discovery.
args = ['discover',
'--start-directory', PROJECT_ROOT,
] + args
return [sys.executable + ' -m unittest'] + args
#return [sys.executable, '-m', 'unittest'] + args
if quick:
start = os.path.join(TEST_ROOT, 'ptvsd')
elif sys.version_info[0] != 3:
start = os.path.join(TEST_ROOT, 'ptvsd')
else:
start = PROJECT_ROOT
cmd += [
'discover',
'--top-level-directory', PROJECT_ROOT,
'--start-directory', start,
]
return cmd + args
if __name__ == '__main__':


@ -0,0 +1,9 @@
import sys
import unittest

# The code under the debugger_protocol package isn't used
# by the debugger (it's used by schema-related tools).  So we don't need
# to support Python 2.
if sys.version_info[0] == 2:
    raise unittest.SkipTest('not tested under Python 2')


@ -0,0 +1,20 @@
import urllib.error


class StubOpener:

    def __init__(self, *files):
        self.files = list(files)
        self.calls = []

    def open(self, *args):
        self.calls.append(args)
        file = self.files.pop(0)
        if file is None:
            if args[0].startswith('http'):
                raise urllib.error.HTTPError(args[0], 404, 'Not Found',
                                             None, None)
            else:
                raise FileNotFoundError
        return file


@ -0,0 +1,98 @@
import contextlib
import io
from textwrap import dedent
import unittest
from .helpers import StubOpener
from debugger_protocol.schema.vendored import FILENAME as VENDORED, METADATA
from debugger_protocol.schema.__main__ import (
COMMANDS, handle_download, handle_check)
class Outfile:
def __init__(self, initial):
self.written = initial
def write(self, data):
self.written += data
return len(data)
def __enter__(self):
return self
def __exit__(self, *args):
pass
class CommandRegistryTests(unittest.TestCase):
def test_commands(self):
self.assertEqual(set(COMMANDS), {
'download',
'check',
})
class HandleDownloadTests(unittest.TestCase):
def test_default_args(self):
schemafile = io.BytesIO(b'<a schema>')
outfile = Outfile(b'')
buf = io.BytesIO(
b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]')
metafile = Outfile('')
opener = StubOpener(schemafile, outfile, buf, metafile)
stdout = io.StringIO()
with contextlib.redirect_stdout(stdout):
with contextlib.redirect_stderr(stdout):
handle_download(
_open=opener.open, _open_url=opener.open)
metadata = '\n'.join(line
for line in metafile.written.splitlines()
if not line.startswith('downloaded: '))
self.assertEqual(outfile.written, b'<a schema>')
self.assertEqual(metadata, dedent("""
upstream: https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json
revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1
checksum: e778c3751f9d0bceaf8d5aa81e2c659f
""").strip()) # noqa
self.assertEqual(stdout.getvalue(), dedent("""\
downloading the schema file from https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json...
...schema file written to {}.
saving the schema metadata...
...metadata written to {}.
""").format(VENDORED, METADATA)) # noqa
class HandleCheckTests(unittest.TestCase):
def test_default_args(self):
metadata = dedent("""
upstream: https://github.com/x/y/raw/master/z
revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1
checksum: e778c3751f9d0bceaf8d5aa81e2c659f
downloaded: 2018-01-09 13:10:59 (UTC)
""")
opener = StubOpener(
io.StringIO(metadata),
io.BytesIO(b'<a schema>'), # local
io.StringIO(metadata),
io.BytesIO(b'<a schema>'), # upstream
io.BytesIO(
b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]'),
)
stdout = io.StringIO()
with contextlib.redirect_stdout(stdout):
with contextlib.redirect_stderr(stdout):
handle_check(
_open=opener.open, _open_url=opener.open)
self.assertEqual(stdout.getvalue(), dedent("""\
checking local schema file...
comparing with upstream schema file...
schema file okay
"""))


@ -0,0 +1,22 @@
import io
import unittest

from .helpers import StubOpener
from debugger_protocol.schema.file import SchemaFileError, read_schema


class ReadSchemaTests(unittest.TestCase):

    def test_success(self):
        schemafile = io.BytesIO(b'<a schema>')
        opener = StubOpener(schemafile)
        data = read_schema('schema.json', _open=opener.open)

        self.assertEqual(data, b'<a schema>')

    def test_file_missing(self):
        opener = StubOpener(None)
        with self.assertRaises(SchemaFileError):
            read_schema('schema.json', _open=opener.open)


@ -0,0 +1,210 @@
from datetime import datetime
import io
import os.path
from textwrap import dedent
import unittest
from .helpers import StubOpener
from debugger_protocol.schema.upstream import URL as UPSTREAM
from debugger_protocol.schema.metadata import (
open_metadata, read_metadata,
MetadataError, Metadata)
class Stringlike:
def __init__(self, value):
self.value = value
def __str__(self):
return self.value
class Hash(Stringlike):
pass
class OpenMetadataTests(unittest.TestCase):
def test_success(self):
expected = object()
opener = StubOpener(expected)
schemadir = os.path.join('x', 'y', 'z', '')
metafile, filename = open_metadata(schemadir + 'schema.json',
_open=opener.open)
self.assertIs(metafile, expected)
self.assertEqual(filename, schemadir + 'UPSTREAM')
def test_file_missing(self):
metafile = None
opener = StubOpener(metafile)
with self.assertRaises(MetadataError):
open_metadata('schema.json', _open=opener.open)
class ReadMetadataTests(unittest.TestCase):
def test_success(self):
metafile = io.StringIO(dedent("""
upstream: https://x.y.z/schema.json
revision: abcdef0123456789
checksum: deadbeefdeadbeefdeadbeefdeadbeef
downloaded: 2018-01-09 13:10:59 (UTC)
"""))
opener = StubOpener(metafile)
schemadir = os.path.join('x', 'y', 'z', '')
meta, filename = read_metadata(schemadir + 'schema.json',
_open=opener.open)
self.assertEqual(meta,
Metadata('https://x.y.z/schema.json',
'abcdef0123456789',
'deadbeefdeadbeefdeadbeefdeadbeef',
datetime(2018, 1, 9, 13, 10, 59),
))
self.assertEqual(filename, schemadir + 'UPSTREAM')
def test_file_missing(self):
metafile = None
opener = StubOpener(metafile)
with self.assertRaises(MetadataError):
read_metadata('schema.json', _open=opener.open)
def test_file_invalid(self):
metafile = io.StringIO('<bogus>')
opener = StubOpener(metafile)
with self.assertRaises(MetadataError):
read_metadata('schema.json', _open=opener.open)
class MetadataTests(unittest.TestCase):
def test_parse_minimal(self):
expected = Metadata('https://x.y.z/schema.json',
'abcdef0123456789',
'deadbeefdeadbeefdeadbeefdeadbeef',
datetime(2018, 1, 9, 13, 10, 59),
)
meta = Metadata.parse(dedent("""
upstream: https://x.y.z/schema.json
revision: abcdef0123456789
checksum: deadbeefdeadbeefdeadbeefdeadbeef
downloaded: 2018-01-09 13:10:59 (UTC)
"""))
self.assertEqual(meta, expected)
def test_parse_with_whitespace_and_comments(self):
expected = Metadata('https://x.y.z/schema.json',
'abcdef0123456789',
'deadbeefdeadbeefdeadbeefdeadbeef',
datetime(2018, 1, 9, 13, 10, 59),
)
meta = Metadata.parse(dedent("""
# generated by x.y.z
upstream: https://x.y.z/schema.json
revision: abcdef0123456789
checksum: deadbeefdeadbeefdeadbeefdeadbeef
downloaded: 2018-01-09 13:10:59 (UTC)
# done!
""")) # noqa
self.assertEqual(meta, expected)
def test_parse_roundtrip_from_object(self):
orig = Metadata('https://x.y.z/schema.json',
'abcdef0123456789',
'deadbeefdeadbeefdeadbeefdeadbeef',
datetime(2018, 1, 9, 13, 10, 59),
)
meta = Metadata.parse(
orig.format())
self.assertEqual(meta, orig)
def test_parse_roundtrip_from_string(self):
orig = dedent("""\
upstream: https://x.y.z/schema.json
revision: abcdef0123456789
checksum: deadbeefdeadbeefdeadbeefdeadbeef
downloaded: 2018-01-09 13:10:59 (UTC)
""")
data = (Metadata.parse(orig)
).format()
self.assertEqual(data, orig)
def test_coercion_noop(self):
meta = Metadata('https://x.y.z/schema.json',
'abcdef0123456789',
'deadbeefdeadbeefdeadbeefdeadbeef',
datetime(2018, 1, 9, 13, 10, 59),
)
self.assertEqual(meta, (
'https://x.y.z/schema.json',
'abcdef0123456789',
'deadbeefdeadbeefdeadbeefdeadbeef',
datetime(2018, 1, 9, 13, 10, 59),
))
def test_coercion_change_all(self):
meta = Metadata(Stringlike('https://x.y.z/schema.json'),
Hash('abcdef0123456789'),
Hash('deadbeefdeadbeefdeadbeefdeadbeef'),
'2018-01-09 13:10:59 (UTC)',
)
self.assertEqual(meta, (
'https://x.y.z/schema.json',
'abcdef0123456789',
'deadbeefdeadbeefdeadbeefdeadbeef',
datetime(2018, 1, 9, 13, 10, 59),
))
def test_validation_fail(self):
baseargs = [
'https://x.y.z/schema.json',
'abcdef0123456789',
'deadbeefdeadbeefdeadbeefdeadbeef',
datetime(2018, 1, 9, 13, 10, 59),
]
for i in range(len(baseargs)):
with self.subTest(baseargs[i]):
args = list(baseargs)
args[i] = ''
with self.assertRaises(ValueError):
Metadata(*args)
def test_url(self):
meta = Metadata(UPSTREAM,
'abcdef0123456789',
'deadbeefdeadbeefdeadbeefdeadbeef',
datetime(2018, 1, 9, 13, 10, 59),
)
url = meta.url
self.assertEqual(url, 'https://github.com/Microsoft/vscode-debugadapter-node/raw/abcdef0123456789/debugProtocol.json') # noqa
def test_format(self):
meta = Metadata('https://x.y.z/schema.json',
'abcdef0123456789',
'deadbeefdeadbeefdeadbeefdeadbeef',
datetime(2018, 1, 9, 13, 10, 59),
)
formatted = meta.format()
self.assertEqual(formatted, dedent("""\
upstream: https://x.y.z/schema.json
revision: abcdef0123456789
checksum: deadbeefdeadbeefdeadbeefdeadbeef
downloaded: 2018-01-09 13:10:59 (UTC)
"""))


@ -0,0 +1,60 @@
from datetime import datetime
import io
import unittest

from .helpers import StubOpener
from debugger_protocol.schema.file import SchemaFileError
from debugger_protocol.schema.metadata import Metadata
from debugger_protocol.schema.upstream import (
    download, read)


class DownloadTests(unittest.TestCase):

    def test_success(self):
        now = datetime.utcnow()
        infile = io.BytesIO(b'<a schema>')
        outfile = io.BytesIO()
        buf = io.BytesIO(
            b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]')
        meta = download('https://github.com/x/y/raw/master/z',
                        infile,
                        outfile,
                        _now=(lambda: now),
                        _open_url=(lambda _: buf),
                        )
        rcvd = outfile.getvalue()

        self.assertEqual(meta, Metadata(
            'https://github.com/x/y/raw/master/z',
            'fc2395ca3564fb2afded8d90ddbe38dad1bf86f1',
            'e778c3751f9d0bceaf8d5aa81e2c659f',
            now,
        ))
        self.assertEqual(rcvd, b'<a schema>')


class ReadSchemaTests(unittest.TestCase):

    def test_success(self):
        schemafile = io.BytesIO(b'<a schema>')
        buf = io.BytesIO(
            b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]')
        opener = StubOpener(schemafile, buf)
        data, meta = read('https://github.com/x/y/raw/master/z',
                          _open_url=opener.open)

        self.assertEqual(data, b'<a schema>')
        self.assertEqual(meta, Metadata(
            'https://github.com/x/y/raw/master/z',
            'fc2395ca3564fb2afded8d90ddbe38dad1bf86f1',
            'e778c3751f9d0bceaf8d5aa81e2c659f',
            meta.downloaded,
        ))

    def test_resource_missing(self):
        schemafile = None
        opener = StubOpener(schemafile)
        with self.assertRaises(SchemaFileError):
            read('schema.json', _open_url=opener.open)


@ -0,0 +1,35 @@
import io
import unittest

from debugger_protocol.schema._util import get_revision, get_checksum


class GetRevisionTests(unittest.TestCase):

    def test_github(self):
        buf = io.BytesIO(
            b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]')
        revision = get_revision('https://github.com/x/y/raw/master/z',
                                _open_url=lambda _: buf)

        self.assertEqual(revision, 'fc2395ca3564fb2afded8d90ddbe38dad1bf86f1')

    def test_unrecognized_url(self):
        revision = get_revision('https://localhost/schema.json',
                                _open_url=lambda _: io.BytesIO())

        self.assertEqual(revision, '<unknown>')


class GetChecksumTests(unittest.TestCase):

    def test_checksums(self):
        checksums = {
            b'': 'd41d8cd98f00b204e9800998ecf8427e',
            b'spam': 'e09f6a7593f8ae3994ea57e1117f67ec',
        }
        for data, expected in checksums.items():
            with self.subTest(data):
                checksum = get_checksum(data)

                self.assertEqual(checksum, expected)


@ -0,0 +1,154 @@
import io
from textwrap import dedent
import unittest
from .helpers import StubOpener
from debugger_protocol.schema.file import SchemaFileError
from debugger_protocol.schema.metadata import MetadataError
from debugger_protocol.schema.vendored import (
SchemaFileMismatchError, check_local, check_upstream)
class CheckLocalTests(unittest.TestCase):
def test_match(self):
metafile = io.StringIO(dedent("""
upstream: https://x.y.z/schema.json
revision: abcdef0123456789
checksum: e778c3751f9d0bceaf8d5aa81e2c659f
downloaded: 2018-01-09 13:10:59 (UTC)
"""))
schemafile = io.BytesIO(b'<a schema>')
opener = StubOpener(metafile, schemafile)
# This does not fail.
check_local('schema.json', _open=opener.open)
def test_mismatch(self):
metafile = io.StringIO(dedent("""
upstream: https://x.y.z/schema.json
revision: abcdef0123456789
checksum: abc2
downloaded: 2018-01-09 13:10:59 (UTC)
"""))
schemafile = io.BytesIO(b'<a schema>')
opener = StubOpener(metafile, schemafile)
with self.assertRaises(SchemaFileMismatchError) as cm:
check_local('schema.json', _open=opener.open)
self.assertEqual(str(cm.exception),
("schema file 'schema.json' does not match "
'metadata file (checksum mismatch: '
"'e778c3751f9d0bceaf8d5aa81e2c659f' != 'abc2')"))
def test_metafile_missing(self):
metafile = None
schemafile = io.BytesIO(b'<a schema>')
opener = StubOpener(metafile, schemafile)
with self.assertRaises(MetadataError):
check_local('schema.json', _open=opener.open)
def test_metafile_invalid(self):
metafile = io.StringIO('<bogus>')
metafile.name = '/x/y/z/UPSTREAM'
schemafile = io.BytesIO(b'<a schema>')
opener = StubOpener(metafile, schemafile)
with self.assertRaises(MetadataError):
check_local('schema.json', _open=opener.open)
def test_schemafile_missing(self):
metafile = io.StringIO(dedent("""
upstream: https://x.y.z/schema.json
revision: abcdef0123456789
checksum: e778c3751f9d0bceaf8d5aa81e2c659f
downloaded: 2018-01-09 13:10:59 (UTC)
"""))
schemafile = None
opener = StubOpener(metafile, schemafile)
with self.assertRaises(SchemaFileError):
check_local('schema.json', _open=opener.open)
class CheckUpstream(unittest.TestCase):
def test_match(self):
metafile = io.StringIO(dedent("""
upstream: https://github.com/x/y/raw/master/z
revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1
checksum: e778c3751f9d0bceaf8d5aa81e2c659f
downloaded: 2018-01-09 13:10:59 (UTC)
"""))
schemafile = io.BytesIO(b'<a schema>')
buf = io.BytesIO(
b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]')
opener = StubOpener(metafile, schemafile, buf)
# This does not fail.
check_upstream('schema.json',
_open=opener.open, _open_url=opener.open)
def test_revision_mismatch(self):
metafile = io.StringIO(dedent("""
upstream: https://github.com/x/y/raw/master/z
revision: abc2
checksum: e778c3751f9d0bceaf8d5aa81e2c659f
downloaded: 2018-01-09 13:10:59 (UTC)
"""))
schemafile = io.BytesIO(b'<a schema>')
buf = io.BytesIO(
b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]')
opener = StubOpener(metafile, schemafile, buf)
with self.assertRaises(SchemaFileMismatchError) as cm:
check_upstream('schema.json',
_open=opener.open, _open_url=opener.open)
self.assertEqual(str(cm.exception),
("local schema file 'schema.json' does not match "
"upstream 'https://github.com/x/y/raw/master/z' "
"(revision mismatch: 'abc2' != 'fc2395ca3564fb2afded8d90ddbe38dad1bf86f1')")) # noqa
def test_checksum_mismatch(self):
metafile = io.StringIO(dedent("""
upstream: https://github.com/x/y/raw/master/z
revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1
checksum: abc2
downloaded: 2018-01-09 13:10:59 (UTC)
"""))
schemafile = io.BytesIO(b'<a schema>')
buf = io.BytesIO(
b'[{"sha": "fc2395ca3564fb2afded8d90ddbe38dad1bf86f1"}]')
opener = StubOpener(metafile, schemafile, buf)
with self.assertRaises(SchemaFileMismatchError) as cm:
check_upstream('schema.json',
_open=opener.open, _open_url=opener.open)
self.assertEqual(str(cm.exception),
("local schema file 'schema.json' does not match "
"upstream 'https://github.com/x/y/raw/master/z' "
"(checksum mismatch: 'abc2' != 'e778c3751f9d0bceaf8d5aa81e2c659f')")) # noqa
def test_metafile_missing(self):
metafile = None
opener = StubOpener(metafile)
with self.assertRaises(MetadataError):
check_upstream('schema.json',
_open=opener.open, _open_url=opener.open)
def test_url_resource_missing(self):
metafile = io.StringIO(dedent("""
upstream: https://github.com/x/y/raw/master/z
revision: fc2395ca3564fb2afded8d90ddbe38dad1bf86f1
checksum: abc2
downloaded: 2018-01-09 13:10:59 (UTC)
"""))
#schemafile = io.BytesIO(b'<a schema>')
schemafile = None
opener = StubOpener(metafile, schemafile)
with self.assertRaises(SchemaFileError):
check_upstream('schema.json',
_open=opener.open, _open_url=opener.open)

tests/helpers/http.py

@ -0,0 +1,74 @@
import http.server
import threading


class Server:
    """Wraps an http.server.HTTPServer in a thread."""

    def __init__(self, handler, host='', port=8000):
        self.handler = handler
        self._addr = (host, port)

        self._server = None
        self._thread = None

    @property
    def address(self):
        host, port = self._addr
        if host == '':
            host = 'localhost'
        return '{}:{}'.format(host, port)

    def start(self):
        if self._server is not None:
            raise RuntimeError('already started')
        self._server = http.server.HTTPServer(self._addr, self.handler)
        self._thread = threading.Thread(
            target=lambda: self._server.serve_forever())
        self._thread.start()

    def stop(self):
        if self._server is None:
            raise RuntimeError('not running')
        self._server.shutdown()
        self._thread.join()
        self._server.server_close()
        self._thread = None
        self._server = None

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, *args):
        self.stop()


def json_file_handler(data):
    """Return an HTTP handler that always serves the given JSON bytes."""
    class HTTPHandler(http.server.BaseHTTPRequestHandler):
        def do_GET(self):
            self.send_response(200)
            self.send_header('Content-Type', b'application/json')
            self.send_header('Content-Length',
                             str(len(data)).encode('ascii'))
            self.end_headers()
            self.wfile.write(data)

        def log_message(self, *args, **kwargs):
            pass
    return HTTPHandler


def error_handler(code, msg):
    """Return an HTTP handler that always returns the given error code."""
    class HTTPHandler(http.server.BaseHTTPRequestHandler):
        def do_GET(self):
            self.send_error(code, msg)

        def log_message(self, *args, **kwargs):
            pass
    return HTTPHandler
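For illustration, a minimal sketch of the helper in use outside the subprocess tests below; it assumes the default port 8000 is free and that the tests package is importable:

import urllib.request
from tests.helpers import http

handler = http.json_file_handler(b'<a schema>')
with http.Server(handler) as srv:
    url = 'http://{}/schema.json'.format(srv.address)
    with urllib.request.urlopen(url) as resp:
        assert resp.read() == b'<a schema>'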

tests/ptvsd/__init__.py

@ -0,0 +1,280 @@
import contextlib
import io
import os.path
import subprocess
import sys
import tempfile
from textwrap import dedent
import unittest
from tests.helpers import http
from debugger_protocol.schema.__main__ import handle_check
class VendoredSchemaTests(unittest.TestCase):
"""Tests to make sure our vendored schema is up-to-date."""
def test_matches_upstream(self):
stdout = io.StringIO()
with contextlib.redirect_stdout(stdout):
with contextlib.redirect_stderr(stdout):
try:
handle_check()
except Exception as exc:
self.fail(str(exc))
class DownloadCommandTests(unittest.TestCase):
CMD = '{python} -m debugger_protocol.schema download'
def setUp(self):
super().setUp()
self._tempdir = tempfile.TemporaryDirectory(prefix='ptvsd-test-')
self.dirname = self._tempdir.name
self.schemafile = os.path.join(self.dirname, 'schema.json')
self.metadata = os.path.join(self.dirname, 'UPSTREAM')
self.cmd = self.CMD.format(python=sys.executable)
self.args = self.cmd.split() + [
'--target', self.schemafile,
]
def tearDown(self):
self._tempdir.cleanup()
super().tearDown()
def test_default_source(self):
res = subprocess.run(self.args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.assertEqual(res.returncode, 0)
self.assertEqual(res.stdout.decode(), dedent("""\
downloading the schema file from https://github.com/Microsoft/vscode-debugadapter-node/raw/master/debugProtocol.json...
...schema file written to {}.
saving the schema metadata...
...metadata written to {}.
""").format(self.schemafile, self.metadata)) # noqa
self.assertEqual(res.stderr, b'')
def test_custom_source(self):
handler = http.json_file_handler(b'<a schema>')
with http.Server(handler) as srv:
upstream = 'http://{}/schema.json'.format(srv.address)
res = subprocess.run(self.args + ['--source', upstream],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout = res.stdout.decode() if res.stdout else ''
stderr = res.stderr.decode() if res.stderr else ''
# Check the command result.
self.assertEqual(res.returncode, 0)
self.assertEqual(stdout, dedent("""\
downloading the schema file from http://localhost:8000/schema.json...
...schema file written to {}.
saving the schema metadata...
...metadata written to {}.
""").format(self.schemafile, self.metadata)) # noqa
self.assertEqual(stderr, '')
# Check the downloaded files.
with open(self.schemafile) as schemafile:
data = schemafile.read()
with open(self.metadata) as metafile:
metadata = metafile.read()
orig = metadata
metadata = '\n'.join(line
for line in metadata.split('\n')
if not line.startswith('downloaded: '))
self.assertEqual(data, "<a schema>")
self.assertEqual(metadata, dedent("""\
upstream: http://localhost:8000/schema.json
revision: <unknown>
checksum: e778c3751f9d0bceaf8d5aa81e2c659f
"""))
self.assertNotEqual(metadata, orig)
class CheckCommandTests(unittest.TestCase):
CMD = '{python} -m debugger_protocol.schema check'
def setUp(self):
super().setUp()
self.tempdir = None
self.cmd = self.CMD.format(python=sys.executable)
self.args = self.cmd.split()
def tearDown(self):
if self.tempdir is not None:
self.tempdir.cleanup()
super().tearDown()
def resolve_filename(self, name):
if self.tempdir is None:
self.tempdir = tempfile.TemporaryDirectory(prefix='ptvsd-test-')
return os.path.join(self.tempdir.name, name)
def add_file(self, name, content):
filename = self.resolve_filename(name)
with open(filename, 'w') as outfile:
outfile.write(content)
return filename
def test_match(self):
schemafile = self.add_file('schema.json', '<a schema>')
self.add_file('UPSTREAM', dedent("""\
upstream: https://x.y.z/a/b/c/debugProtocol.json
revision: <unknown>
checksum: e778c3751f9d0bceaf8d5aa81e2c659f
downloaded: 2018-01-09 13:10:59 (UTC)
"""))
handler = http.json_file_handler(b'<a schema>')
with http.Server(handler) as srv:
upstream = 'http://{}/schema.json'.format(srv.address)
args = self.args + [
'--schemafile', schemafile,
'--upstream', upstream,
]
res = subprocess.run(args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout = res.stdout.decode() if res.stdout else ''
stderr = res.stderr.decode() if res.stderr else ''
# Check the command result.
self.assertEqual(res.returncode, 0)
self.assertEqual(stdout, dedent("""\
checking local schema file...
comparing with upstream schema file...
schema file okay
"""))
self.assertEqual(stderr, '')
def test_schema_missing(self):
schemafile = self.resolve_filename('schema.json')
self.add_file('UPSTREAM', dedent("""\
upstream: https://x.y.z/a/b/c/debugProtocol.json
revision: <unknown>
checksum: e778c3751f9d0bceaf8d5aa81e2c659f
downloaded: 2018-01-09 13:10:59 (UTC)
"""))
args = self.args + [
'--schemafile', schemafile,
'--upstream', '<a URL>',
]
res = subprocess.run(args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout = res.stdout.decode() if res.stdout else ''
stderr = res.stderr.decode() if res.stderr else ''
# Check the command result.
self.assertEqual(res.returncode, 1)
self.assertEqual(stdout, dedent("""\
checking local schema file...
"""))
self.assertRegex(stderr.strip(), r"ERROR: schema file '[^']*schema.json' not found") # noqa
def test_metadata_missing(self):
schemafile = self.add_file('schema.json', '<a schema>')
args = self.args + [
'--schemafile', schemafile,
'--upstream', '<a URL>',
]
res = subprocess.run(args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout = res.stdout.decode() if res.stdout else ''
stderr = res.stderr.decode() if res.stderr else ''
# Check the command result.
self.assertEqual(res.returncode, 1)
self.assertEqual(stdout, dedent("""\
checking local schema file...
"""))
self.assertRegex(stderr.strip(), r"ERROR: metadata file for '[^']*schema.json' not found") # noqa
def test_metadata_mismatch(self):
schemafile = self.add_file('schema.json', '<other schema>')
self.add_file('UPSTREAM', dedent("""\
upstream: https://x.y.z/a/b/c/debugProtocol.json
revision: <unknown>
checksum: e778c3751f9d0bceaf8d5aa81e2c659f
downloaded: 2018-01-09 13:10:59 (UTC)
"""))
args = self.args + [
'--schemafile', schemafile,
'--upstream', '<a URL>',
]
res = subprocess.run(args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout = res.stdout.decode() if res.stdout else ''
stderr = res.stderr.decode() if res.stderr else ''
# Check the command result.
self.assertEqual(res.returncode, 1)
self.assertEqual(stdout, dedent("""\
checking local schema file...
"""))
self.assertRegex(stderr.strip(), r"ERROR: schema file '[^']*schema.json' does not match metadata file .*") # noqa
def test_upstream_not_found(self):
schemafile = self.add_file('schema.json', '<a schema>')
self.add_file('UPSTREAM', dedent("""\
upstream: https://x.y.z/a/b/c/debugProtocol.json
revision: <unknown>
checksum: e778c3751f9d0bceaf8d5aa81e2c659f
downloaded: 2018-01-09 13:10:59 (UTC)
"""))
handler = http.error_handler(404, 'schema not found')
with http.Server(handler) as srv:
upstream = 'http://{}/schema.json'.format(srv.address)
args = self.args + [
'--schemafile', schemafile,
'--upstream', upstream,
]
res = subprocess.run(args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout = res.stdout.decode() if res.stdout else ''
stderr = res.stderr.decode() if res.stderr else ''
# Check the command result.
self.assertEqual(res.returncode, 1)
self.assertEqual(stdout, dedent("""\
checking local schema file...
comparing with upstream schema file...
"""))
self.assertEqual(stderr.strip(), "ERROR: schema file at 'http://localhost:8000/schema.json' not found") # noqa
def test_upstream_mismatch(self):
schemafile = self.add_file('schema.json', '<a schema>')
self.add_file('UPSTREAM', dedent("""\
upstream: https://x.y.z/a/b/c/debugProtocol.json
revision: <unknown>
checksum: e778c3751f9d0bceaf8d5aa81e2c659f
downloaded: 2018-01-09 13:10:59 (UTC)
"""))
handler = http.json_file_handler(b'<other schema>')
with http.Server(handler) as srv:
upstream = 'http://{}/schema.json'.format(srv.address)
args = self.args + [
'--schemafile', schemafile,
'--upstream', upstream,
]
res = subprocess.run(args,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout = res.stdout.decode() if res.stdout else ''
stderr = res.stderr.decode() if res.stderr else ''
# Check the command result.
self.assertEqual(res.returncode, 1)
self.assertEqual(stdout, dedent("""\
checking local schema file...
comparing with upstream schema file...
"""))
self.assertRegex(stderr.strip(), r"ERROR: local schema file '[^']*schema.json' does not match upstream .*") # noqa


@ -6,18 +6,41 @@ import sys
from .__main__ import convert_argv
PROJECT_ROOT = os.path.dirname(os.path.dirname(__file__))
TEST_ROOT = os.path.dirname(__file__)
PROJECT_ROOT = os.path.dirname(TEST_ROOT)
class ConvertArgsTests(unittest.TestCase):
def test_discovery(self):
argv = convert_argv(['-v', '--failfast'])
def test_no_args(self):
argv = convert_argv([])
self.assertEqual(argv, [
sys.executable + ' -m unittest',
'discover',
'--top-level-directory', PROJECT_ROOT,
'--start-directory', PROJECT_ROOT,
])
def test_discovery_full(self):
argv = convert_argv(['-v', '--failfast', '--full'])
self.assertEqual(argv, [
sys.executable + ' -m unittest',
'discover',
'--top-level-directory', PROJECT_ROOT,
'--start-directory', PROJECT_ROOT,
'-v', '--failfast',
])
def test_discovery_quick(self):
argv = convert_argv(['-v', '--failfast', '--quick'])
self.assertEqual(argv, [
sys.executable + ' -m unittest',
'discover',
'--top-level-directory', PROJECT_ROOT,
'--start-directory', os.path.join(TEST_ROOT, 'ptvsd'),
'-v', '--failfast',
])
@ -35,12 +58,3 @@ class ConvertArgsTests(unittest.TestCase):
'x.y.Spam.test_spam',
'z.Eggs',
])
def test_no_args(self):
argv = convert_argv([])
self.assertEqual(argv, [
sys.executable + ' -m unittest',
'discover',
'--start-directory', PROJECT_ROOT,
])