Consider relative paths as library paths. Fixes #1946 Fixes #2027

This commit also makes sure that unhandled exceptions raised
in libraries are shown if some frame in their stack is in user
code (when justMyCode:true).
Fabio Zadrozny 2020-01-09 15:19:49 -03:00
parent a66e624fb4
commit 0149646a4d
13 changed files with 621 additions and 56 deletions

View file

@@ -113,32 +113,27 @@ def stop_on_unhandled_exception(py_db, thread, additional_info, arg):
if exctype is SystemExit and py_db.ignore_system_exit_code(value):
return
if py_db.exclude_exception_by_filter(exception_breakpoint, tb, True):
return
frames = []
user_frame = None
while tb:
frame = tb.tb_frame
if exception_breakpoint.ignore_libraries and py_db.in_project_scope(frame):
while tb is not None:
if not py_db.exclude_exception_by_filter(exception_breakpoint, tb):
user_frame = tb.tb_frame
frames.append(tb.tb_frame)
tb = tb.tb_next
if user_frame is None:
return
frames_byid = dict([(id(frame), frame) for frame in frames])
if exception_breakpoint.ignore_libraries and user_frame is not None:
frame = user_frame
else:
frame = frames[-1]
add_exception_to_frame(frame, arg)
add_exception_to_frame(user_frame, arg)
if exception_breakpoint.condition is not None:
eval_result = py_db.handle_breakpoint_condition(additional_info, exception_breakpoint, frame)
eval_result = py_db.handle_breakpoint_condition(additional_info, exception_breakpoint, user_frame)
if not eval_result:
return
if exception_breakpoint.expression is not None:
py_db.handle_breakpoint_expression(exception_breakpoint, additional_info, frame)
py_db.handle_breakpoint_expression(exception_breakpoint, additional_info, user_frame)
try:
additional_info.pydev_message = exception_breakpoint.qname
@@ -147,7 +142,7 @@ def stop_on_unhandled_exception(py_db, thread, additional_info, arg):
pydev_log.debug('Handling post-mortem stop on exception breakpoint %s' % (exception_breakpoint.qname,))
py_db.do_stop_on_unhandled_exception(thread, frame, frames_byid, arg)
py_db.do_stop_on_unhandled_exception(thread, user_frame, frames_byid, arg)
def get_exception_class(kls):
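
The hunk above changes stop_on_unhandled_exception to walk the whole traceback and remember the deepest frame that is not excluded by the exception filters, so an exception raised inside a library still produces a stop whenever some user frame is on the stack. Below is a minimal standalone sketch of that selection logic; is_user_code and pick_stop_frame are hypothetical helpers standing in for the debugger's py_db.exclude_exception_by_filter / in_project_scope machinery, not the committed API.

```python
import os
import sys


def is_user_code(filename, project_roots):
    # Hypothetical stand-in for the debugger's exclude filters /
    # in_project_scope check.
    return any(os.path.abspath(filename).startswith(root) for root in project_roots)


def pick_stop_frame(tb, project_roots, ignore_libraries=True):
    """Return the deepest user-code frame in a traceback, or None.

    Mirrors the new behavior: if every frame belongs to library code,
    the debugger does not stop on the unhandled exception at all.
    """
    user_frame = None
    while tb is not None:
        frame = tb.tb_frame
        if not ignore_libraries or is_user_code(frame.f_code.co_filename, project_roots):
            user_frame = frame
        tb = tb.tb_next
    return user_frame


if __name__ == '__main__':
    try:
        import json
        json.loads('not json')  # raised inside the stdlib (library code)
    except Exception:
        tb = sys.exc_info()[2]
        roots = [os.path.dirname(os.path.abspath(__file__))]
        frame = pick_stop_frame(tb, roots)
        # The stop lands on this script's frame, not inside json/decoder.py.
        print('would stop at:', frame and frame.f_code.co_filename)
```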

View file

@@ -7362,7 +7362,7 @@ static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_8should_st
* if not eval_result:
* return False, frame # <<<<<<<<<<<<<<
*
* if main_debugger.exclude_exception_by_filter(exception_breakpoint, trace, False):
* if main_debugger.exclude_exception_by_filter(exception_breakpoint, trace):
*/
__Pyx_XDECREF(__pyx_r);
__pyx_t_5 = PyTuple_New(2); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 302, __pyx_L1_error)
@@ -7398,7 +7398,7 @@ static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_8should_st
/* "_pydevd_bundle/pydevd_cython.pyx":304
* return False, frame
*
* if main_debugger.exclude_exception_by_filter(exception_breakpoint, trace, False): # <<<<<<<<<<<<<<
* if main_debugger.exclude_exception_by_filter(exception_breakpoint, trace): # <<<<<<<<<<<<<<
* pydev_log.debug("Ignore exception %s in library %s -- (%s)" % (exception, frame.f_code.co_filename, frame.f_code.co_name))
* return False, frame
*/
@@ -7418,22 +7418,22 @@ static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_8should_st
}
#if CYTHON_FAST_PYCALL
if (PyFunction_Check(__pyx_t_4)) {
PyObject *__pyx_temp[4] = {__pyx_t_1, __pyx_v_exception_breakpoint, __pyx_v_trace, Py_False};
__pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_12, 3+__pyx_t_12); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 304, __pyx_L1_error)
PyObject *__pyx_temp[3] = {__pyx_t_1, __pyx_v_exception_breakpoint, __pyx_v_trace};
__pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_12, 2+__pyx_t_12); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 304, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_GOTREF(__pyx_t_5);
} else
#endif
#if CYTHON_FAST_PYCCALL
if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) {
PyObject *__pyx_temp[4] = {__pyx_t_1, __pyx_v_exception_breakpoint, __pyx_v_trace, Py_False};
__pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_12, 3+__pyx_t_12); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 304, __pyx_L1_error)
PyObject *__pyx_temp[3] = {__pyx_t_1, __pyx_v_exception_breakpoint, __pyx_v_trace};
__pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_12, 2+__pyx_t_12); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 304, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_GOTREF(__pyx_t_5);
} else
#endif
{
__pyx_t_14 = PyTuple_New(3+__pyx_t_12); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 304, __pyx_L1_error)
__pyx_t_14 = PyTuple_New(2+__pyx_t_12); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 304, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_14);
if (__pyx_t_1) {
__Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_14, 0, __pyx_t_1); __pyx_t_1 = NULL;
@@ -7444,9 +7444,6 @@ static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_8should_st
__Pyx_INCREF(__pyx_v_trace);
__Pyx_GIVEREF(__pyx_v_trace);
PyTuple_SET_ITEM(__pyx_t_14, 1+__pyx_t_12, __pyx_v_trace);
__Pyx_INCREF(Py_False);
__Pyx_GIVEREF(Py_False);
PyTuple_SET_ITEM(__pyx_t_14, 2+__pyx_t_12, Py_False);
__pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_14, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 304, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0;
@@ -7458,7 +7455,7 @@ static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_8should_st
/* "_pydevd_bundle/pydevd_cython.pyx":305
*
* if main_debugger.exclude_exception_by_filter(exception_breakpoint, trace, False):
* if main_debugger.exclude_exception_by_filter(exception_breakpoint, trace):
* pydev_log.debug("Ignore exception %s in library %s -- (%s)" % (exception, frame.f_code.co_filename, frame.f_code.co_name)) # <<<<<<<<<<<<<<
* return False, frame
*
@@ -7511,7 +7508,7 @@ static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_8should_st
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
/* "_pydevd_bundle/pydevd_cython.pyx":306
* if main_debugger.exclude_exception_by_filter(exception_breakpoint, trace, False):
* if main_debugger.exclude_exception_by_filter(exception_breakpoint, trace):
* pydev_log.debug("Ignore exception %s in library %s -- (%s)" % (exception, frame.f_code.co_filename, frame.f_code.co_name))
* return False, frame # <<<<<<<<<<<<<<
*
@@ -7533,7 +7530,7 @@ static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_8should_st
/* "_pydevd_bundle/pydevd_cython.pyx":304
* return False, frame
*
* if main_debugger.exclude_exception_by_filter(exception_breakpoint, trace, False): # <<<<<<<<<<<<<<
* if main_debugger.exclude_exception_by_filter(exception_breakpoint, trace): # <<<<<<<<<<<<<<
* pydev_log.debug("Ignore exception %s in library %s -- (%s)" % (exception, frame.f_code.co_filename, frame.f_code.co_name))
* return False, frame
*/

View file

@@ -301,7 +301,7 @@ cdef class PyDBFrame:
if not eval_result:
return False, frame
if main_debugger.exclude_exception_by_filter(exception_breakpoint, trace, False):
if main_debugger.exclude_exception_by_filter(exception_breakpoint, trace):
pydev_log.debug("Ignore exception %s in library %s -- (%s)" % (exception, frame.f_code.co_filename, frame.f_code.co_name))
return False, frame

View file

@@ -152,7 +152,7 @@ class PyDBFrame:
if not eval_result:
return False, frame
if main_debugger.exclude_exception_by_filter(exception_breakpoint, trace, False):
if main_debugger.exclude_exception_by_filter(exception_breakpoint, trace):
pydev_log.debug("Ignore exception %s in library %s -- (%s)" % (exception, frame.f_code.co_filename, frame.f_code.co_name))
return False, frame

View file

@@ -1099,18 +1099,13 @@ class PyDB(object):
self._apply_filter_cache[cache_key] = False
return False
def exclude_exception_by_filter(self, exception_breakpoint, trace, is_uncaught):
def exclude_exception_by_filter(self, exception_breakpoint, trace):
if not exception_breakpoint.ignore_libraries and not self._exclude_filters_enabled:
return False
if trace is None:
return True
# We need to get the place where it was raised if it's an uncaught exception...
if is_uncaught:
while trace.tb_next is not None:
trace = trace.tb_next
ignore_libraries = exception_breakpoint.ignore_libraries
exclude_filters_enabled = self._exclude_filters_enabled

View file

@@ -56,7 +56,7 @@ from functools import partial
_nt_os_normcase = ntpath.normcase
basename = os.path.basename
exists = os.path.exists
os_path_exists = os.path.exists
join = os.path.join
try:
@@ -71,6 +71,31 @@ except:
# realpath is a no-op on systems without islink support
rPath = os.path.abspath
def _get_library_dir():
library_dir = None
try:
import sysconfig
library_dir = sysconfig.get_path('purelib')
except ImportError:
pass # i.e.: Only 2.7 onwards
if library_dir is None or not os_path_exists(library_dir):
for path in sys.path:
if os_path_exists(path) and os.path.basename(path) == 'site-packages':
library_dir = path
break
if library_dir is None or not os_path_exists(library_dir):
library_dir = os.path.dirname(os.__file__)
return library_dir
# Note: we can't call sysconfig.get_path from _NormPath (it deadlocks on Python 2.7) so, we
# need to get the library dir during module loading.
_library_dir = _get_library_dir()
# defined as a list of tuples where the 1st element of the tuple is the path in the client machine
# and the 2nd element is the path in the server machine.
# see module docstring for more details.
@@ -182,7 +207,7 @@ if sys.platform == 'win32':
if '~' in filename:
filename = convert_to_long_pathname(filename)
if filename.startswith('<') or not os.path.exists(filename):
if filename.startswith('<') or not os_path_exists(filename):
return filename # Not much we can do.
drive, parts = os.path.splitdrive(os.path.normpath(filename))
@@ -200,7 +225,7 @@ if sys.platform == 'win32':
try:
return _resolve_listing(drive, iter(parts))
except FileNotFoundError:
if os.path.exists(filename):
if os_path_exists(filename):
# This is really strange, ask the user to report as error.
sys.stderr.write('\npydev debugger: critical: unable to get real case for file. Details:\n'
'filename: %s\ndrive: %s\nparts: %s\n'
@@ -331,29 +356,48 @@ def _NormPaths(filename, NORM_PATHS_CONTAINER=NORM_PATHS_CONTAINER):
if os_path is None: # Interpreter shutdown
return filename, filename
os_path_abspath = os.path.abspath
os_path_abspath = os_path.abspath
os_path_isabs = os_path.isabs
if os_path_abspath is None: # Interpreter shutdown
if os_path_abspath is None or os_path_isabs is None or rPath is None: # Interpreter shutdown
return filename, filename
if rPath is None: # Interpreter shutdown
return filename, filename
isabs = os_path_isabs(filename)
abs_path = _NormPath(filename, os_path_abspath)
real_path = _NormPath(filename, rPath)
abs_path = _NormPath(filename, os_path_abspath, isabs)
real_path = _NormPath(filename, rPath, isabs)
# cache it for fast access later
NORM_PATHS_CONTAINER[filename] = abs_path, real_path
return abs_path, real_path
def _NormPath(filename, normpath):
def _get_relative_filename_abs_path(filename, normpath, os_path_exists=os_path_exists):
# If we have a relative path and the file does not exist when made absolute, try to
# resolve it based on the sys.path entries.
for p in sys.path:
r = normpath(os.path.join(p, filename))
if os_path_exists(r):
return r
# We couldn't find the real file for the relative path. Resolve it as if it was in
# a library (so that it's considered a library file and not a project file).
r = normpath(os.path.join(_library_dir, filename))
return r
def _NormPath(filename, normpath, isabs, os_path_exists=os_path_exists, join=join):
if filename.startswith('<'):
# Not really a file, rather a synthetic name like <string> or <ipython-...>;
# shouldn't be normalized.
return filename
r = normpath(filename)
if not isabs:
if not os_path_exists(r):
r = _get_relative_filename_abs_path(filename, normpath)
ind = r.find('.zip')
if ind == -1:
ind = r.find('.egg')
@@ -385,8 +429,13 @@ _NOT_FOUND_SENTINEL = object()
def exists(file):
if os.path.exists(file):
return file
if os_path_exists(file):
return True
if not os.path.isabs(file):
file = _get_relative_filename_abs_path(file, os.path.abspath)
if os_path_exists(file):
return True
ind = file.find('.zip')
if ind == -1:
@@ -425,8 +474,8 @@ def exists(file):
return join(zip_path, inner_path)
except KeyError:
return None
return None
return False
return False
# Now, let's do a quick test to see if we're working with a version of python that has no problems
@@ -436,7 +485,7 @@ try:
code = rPath.func_code
except AttributeError:
code = rPath.__code__
if not exists(_NormFile(code.co_filename)):
if not os_path_exists(_NormFile(code.co_filename)):
sys.stderr.write('-------------------------------------------------------------------------------\n')
sys.stderr.write('pydev debugger: CRITICAL WARNING: This version of python seems to be incorrectly compiled (internal generated filenames are not absolute)\n')
sys.stderr.write('pydev debugger: The debugger may still function, but it will work slower and may miss breakpoints.\n')
@@ -453,11 +502,11 @@ try:
return NORM_SEARCH_CACHE[filename]
except KeyError:
abs_path, real_path = initial_norm_paths(filename)
if not exists(real_path):
if not os_path_exists(real_path):
# We must actually go on and check if we can find it as if it was a relative path for some of the paths in the pythonpath
for path in sys.path:
abs_path, real_path = initial_norm_paths(join(path, filename))
if exists(real_path):
if os_path_exists(real_path):
break
else:
sys.stderr.write('pydev debugger: Unable to find real location for: %s\n' % (filename,))
@@ -613,7 +662,7 @@ def setup_client_server_paths(paths):
if found_translation:
translated = _NormFile(translated)
else:
if not os.path.exists(translated):
if not os_path_exists(translated):
if not translated.startswith('<'):
# This is a configuration error, so, write it always so
# that the user can fix it.
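
The pydevd_file_utils.py changes above resolve a relative filename against the sys.path entries and, when nothing matches, place it under the interpreter's library directory so that justMyCode treats it as library code. The following standalone sketch shows that resolution order; the helper names mirror _get_library_dir and _get_relative_filename_abs_path from the diff, but it is not the committed code (the commit also guards against old interpreters without sysconfig, which is omitted here).

```python
import os
import sys
import sysconfig


def get_library_dir():
    # Mirrors _get_library_dir: prefer sysconfig's purelib directory, fall
    # back to a 'site-packages' entry on sys.path, then to os.__file__'s dir.
    library_dir = sysconfig.get_path('purelib')
    if library_dir and os.path.exists(library_dir):
        return library_dir
    for path in sys.path:
        if os.path.exists(path) and os.path.basename(path) == 'site-packages':
            return path
    return os.path.dirname(os.__file__)


def resolve_relative(filename):
    # Mirrors _get_relative_filename_abs_path: try each sys.path entry; if
    # the file exists nowhere, pretend it lives in the library dir so the
    # justMyCode filters classify it as library code.
    for p in sys.path:
        candidate = os.path.abspath(os.path.join(p, filename))
        if os.path.exists(candidate):
            return candidate
    return os.path.abspath(os.path.join(get_library_dir(), filename))


if __name__ == '__main__':
    # A .pyx that only ships as generated C resolves under the library dir;
    # create my_dir/my_file.pyx under a sys.path entry and it resolves there
    # instead (this is what test_relative_paths below checks).
    print(resolve_relative('my_dir/my_file.pyx'))
```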

View file

@@ -0,0 +1,13 @@
import sys
filename = sys.argv[1]
obj = compile('''
def call_exception_in_exec():
a = 10
b = 20
raise Exception('TEST SUCEEDED')
''', filename, 'exec')
exec(obj)
call_exception_in_exec() # @UndefinedVariable

View file

@@ -4,6 +4,7 @@ from _pydevd_bundle.pydevd_constants import IS_WINDOWS, IS_PY2
from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
import io
from _pydev_bundle.pydev_log import log_context
from _pydevd_bundle import pydevd_filtering
def test_convert_utilities(tmpdir):
@@ -259,6 +260,48 @@ def test_to_server_and_to_client(tmpdir):
pydevd_file_utils.setup_client_server_paths([])
def test_relative_paths(tmpdir):
'''
We need to check that we can deal with relative paths.
Use cases:
- Relative path of file that does not exist:
Use case is a cython-generated module which is generated from a .pyx which
is not distributed. In this case we need to resolve the file to a library path file.
- Relative path of a file that exists but not when resolved from the working directory:
Use case is a cython-generated module which is generated from a .pyx which is
distributed. In this case we need to resolve to the real file based on the sys.path
entries.
'''
import pydevd_file_utils
import sys
sys.path.append(str(tmpdir))
try:
pydevd_file_utils.NORM_PATHS_AND_BASE_CONTAINER.clear()
pydevd_file_utils.NORM_PATHS_CONTAINER.clear()
abs_path = pydevd_file_utils.get_abs_path_real_path_and_base_from_file('my_dir/my_file.pyx')[0]
assert 'site-packages' in abs_path
assert os.path.normcase(str(tmpdir)) not in abs_path
assert not pydevd_file_utils.exists('my_dir/my_file.pyx')
# If the relative file exists when joined with some entry in the PYTHONPATH we'll consider
# that the relative path points to that absolute path.
target_dir = os.path.join(str(tmpdir), 'my_dir')
os.makedirs(target_dir)
with open(os.path.join(target_dir, 'my_file.pyx'), 'w') as stream:
stream.write('empty')
pydevd_file_utils.NORM_PATHS_AND_BASE_CONTAINER.clear()
pydevd_file_utils.NORM_PATHS_CONTAINER.clear()
abs_path = pydevd_file_utils.get_abs_path_real_path_and_base_from_file('my_dir/my_file.pyx')[0]
assert 'site-packages' not in abs_path
assert os.path.normcase(str(tmpdir)) in abs_path
assert pydevd_file_utils.exists('my_dir/my_file.pyx')
finally:
sys.path.remove(str(tmpdir))
def test_zip_paths(tmpdir):
import pydevd_file_utils
import sys

View file

@@ -3423,6 +3423,16 @@ def test_exception_on_filtered_file(case_setup):
)
writer.write_make_initial_run()
# Note: the unhandled exception was initially raised in a file which is filtered out, but we
# should be able to see the frames which are part of the project.
hit = writer.wait_for_breakpoint_hit(
REASON_UNCAUGHT_EXCEPTION,
file='my_code_exception_on_other.py',
line=writer.get_line_index_with_content('other.raise_exception()')
)
writer.write_run_thread(hit.thread_id)
writer.finished_ok = True

View file

@@ -599,6 +599,77 @@ def test_case_handled_exception_breaks(case_setup):
writer.finished_ok = True
@pytest.mark.parametrize('target', [
'absolute',
'relative',
])
@pytest.mark.parametrize('just_my_code', [
True,
False,
])
def test_case_unhandled_exception_just_my_code(case_setup, target, just_my_code):
def check_test_suceeded_msg(writer, stdout, stderr):
# Don't call super (we have an unhandled exception in the stack trace).
return 'TEST SUCEEDED' in ''.join(stderr)
def additional_output_checks(writer, stdout, stderr):
if 'call_exception_in_exec()' not in stderr:
raise AssertionError('Expected test to have an unhandled exception.\nstdout:\n%s\n\nstderr:\n%s' % (
stdout, stderr))
def get_environ(self):
env = os.environ.copy()
# Note that we put the working directory in the project roots to check that when expanded
# the relative file that doesn't exist is still considered a library file.
env["IDE_PROJECT_ROOTS"] = os.path.dirname(self.TEST_FILE) + os.pathsep + os.path.abspath('.')
return env
def update_command_line_args(writer, args):
ret = debugger_unittest.AbstractWriterThread.update_command_line_args(writer, args)
if target == 'absolute':
if sys.platform == 'win32':
ret.append('c:/temp/folder/my_filename.pyx')
else:
ret.append('/temp/folder/my_filename.pyx')
elif target == 'relative':
ret.append('folder/my_filename.pyx')
else:
raise AssertionError('Unhandled case: %s' % (target,))
return args
target_filename = '_debugger_case_unhandled_just_my_code.py'
with case_setup.test_file(
target_filename,
check_test_suceeded_msg=check_test_suceeded_msg,
additional_output_checks=additional_output_checks,
update_command_line_args=update_command_line_args,
get_environ=get_environ,
EXPECTED_RETURNCODE=1,
) as writer:
json_facade = JsonFacade(writer)
json_facade.write_launch(debugStdLib=False if just_my_code else True)
json_facade.write_set_exception_breakpoints(['uncaught'])
json_facade.write_make_initial_run()
json_hit = json_facade.wait_for_thread_stopped(reason='exception')
frames = json_hit.stack_trace_response.body.stackFrames
if just_my_code:
assert len(frames) == 1
assert frames[0]['source']['path'].endswith(target_filename)
else:
assert len(frames) > 1
assert frames[0]['source']['path'].endswith('my_filename.pyx')
json_facade.write_continue()
writer.finished_ok = True
@pytest.mark.parametrize('target_file', [
'_debugger_case_unhandled_exceptions.py',
'_debugger_case_unhandled_exceptions_custom.py',

View file

@@ -0,0 +1,293 @@
import Cython
from Cython.Compiler import Nodes
from Cython.Compiler.Errors import CompileError
import sys
import json
import traceback
import os
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def node_to_dict(node, _recurse_level=0):
_recurse_level += 1
assert _recurse_level < 5000, "It seems we are recursing..."
node_name = node.__class__.__name__
# print((' ' * _recurse_level) + node_name)
if node_name.endswith("Node"):
node_name = node_name[:-4]
data = {"__node__": node_name}
if _recurse_level == 1:
data['__version__'] = Cython.__version__
for attr_name, attr in [(key, value) for key, value in node.__dict__.items()]:
if attr_name in ("pos", "position"):
data["line"] = attr[1]
data["col"] = attr[2]
continue
if isinstance(attr, Nodes.Node):
data[attr_name] = node_to_dict(attr, _recurse_level)
elif isinstance(attr, (list, tuple)):
lst = []
for x in attr:
if isinstance(x, Nodes.Node):
lst.append(node_to_dict(x, _recurse_level))
elif isinstance(x, (bytes, str)):
lst.append(x)
elif hasattr(x, 'encode'):
lst.append(x.encode('utf-8', 'replace'))
elif isinstance(x, (list, tuple)):
tup = []
for y in x:
if isinstance(y, (str, bytes)):
tup.append(y)
elif isinstance(y, Nodes.Node):
tup.append(node_to_dict(y, _recurse_level))
lst.append(tup)
data[attr_name] = lst
else:
data[attr_name] = str(attr)
return data
def source_to_dict(source, name=None):
from Cython.Compiler.TreeFragment import parse_from_strings, StatListNode
# Right now we don't collect errors, but leave the API compatible already.
collected_errors = []
try:
# Note: we don't use TreeFragment because it formats the code removing empty lines
# (which ends up creating an AST with wrong lines).
if not name:
name = "(tree fragment)"
mod = t = parse_from_strings(name, source)
t = t.body # Make sure a StatListNode is at the top
if not isinstance(t, StatListNode):
t = StatListNode(pos=mod.pos, stats=[t])
root = t
except CompileError as e:
return {
'ast': None,
'errors': [node_to_dict(e)]
}
except BaseException as e:
as_dict = {
'ast': None,
'errors': [{
'__node__': 'CompileError', 'line': 1, 'col': 1, 'message_only': str(e)
}]
}
return as_dict
result = {'ast': node_to_dict(root), 'errors': [node_to_dict(e) for e in collected_errors]}
return result
from _pydev_bundle import pydev_localhost
HOST = pydev_localhost.get_localhost() # Symbolic name meaning the local host
IS_PYTHON_3_ONWARDS = sys.version_info[0] >= 3
def dbg(s):
sys.stderr.write('%s\n' % (s,))
# f = open('c:/temp/test.txt', 'a')
# print_ >> f, s
# f.close()
SERVER_NAME = 'CythonJson'
class Exit(Exception):
pass
class CythonJsonServer(object):
def __init__(self, port):
self.ended = False
self._buffer = b''
self.port = port
self.socket = None # socket to send messages.
self.exit_process_on_kill = True
def emulated_sendall(self, msg):
MSGLEN = 1024 * 20
totalsent = 0
while totalsent < MSGLEN:
sent = self.socket.send(msg[totalsent:])
if sent == 0:
return
totalsent = totalsent + sent
def send(self, msg):
if not isinstance(msg, bytes):
msg = msg.encode('utf-8', 'replace')
if not hasattr(self.socket, 'sendall'):
# Older versions (jython 2.1)
self.emulated_sendall(msg)
else:
if IS_PYTHON_3_ONWARDS:
self.socket.sendall(msg)
else:
self.socket.sendall(msg)
def connect_to_server(self):
from _pydev_imps._pydev_saved_modules import socket
self.socket = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.connect((HOST, self.port))
except:
sys.stderr.write('Error on connect_to_server with parameters: host: %s port: %s\n' % (HOST, self.port))
raise
def _read(self, size):
while True:
buffer_len = len(self._buffer)
if buffer_len == size:
ret = self._buffer
self._buffer = b''
return ret
if buffer_len > size:
ret = self._buffer[:size]
self._buffer = self._buffer[size:]
return ret
try:
r = self.socket.recv(max(size - buffer_len, 1024))
except OSError:
return b''
if not r:
return b''
self._buffer += r
def _read_line(self):
while True:
i = self._buffer.find(b'\n')
if i != -1:
i += 1 # Add the newline to the return
ret = self._buffer[:i]
self._buffer = self._buffer[i:]
return ret
else:
try:
r = self.socket.recv(1024)
except OSError:
return b''
if not r:
return b''
self._buffer += r
def process_command(self, json_contents):
try:
as_dict = json.loads(json_contents)
if as_dict['command'] == 'cython_to_json_ast':
contents = as_dict['contents']
as_dict = source_to_dict(contents)
result = as_dict
else:
result = {'command': '<unexpected>', 'received': json_contents}
except:
try:
from StringIO import StringIO
except:
from io import StringIO
s = StringIO()
traceback.print_exc(file=s)
result = {'command': '<errored>', 'error': s.getvalue()}
return json.dumps(result)
def run(self):
# Echo server program
try:
dbg(SERVER_NAME + ' connecting to java server on %s (%s)' % (HOST, self.port))
# after being connected, create a socket as a client.
self.connect_to_server()
dbg(SERVER_NAME + ' Connected to java server')
content_len = -1
while True:
dbg('Will read line...')
line = self._read_line()
dbg('Read: %s' % (line,))
if not line:
raise Exit()
if line.startswith(b'Content-Length:'):
content_len = int(line.strip().split(b':', 1)[1])
dbg('Found content len: %s' % (content_len,))
continue
if content_len != -1:
# If we previously received a content length, read until a '\r\n'.
if line == b'\r\n':
dbg('Will read contents (%s)...' % (content_len,))
json_contents = self._read(content_len)
dbg('Read: %s' % (json_contents,))
content_len = -1
if len(json_contents) == 0:
raise Exit()
# We just received a json message, let's process it.
dbg('Will process...')
output = self.process_command(json_contents)
if not isinstance(output, bytes):
output = output.encode('utf-8', 'replace')
self.send('Content-Length: %s\r\n\r\n' % (len(output),))
self.send(output)
continue
except Exit:
sys.exit(0)
except:
traceback.print_exc()
raise
if __name__ == '__main__':
args = sys.argv[1:]
if args == ['-']:
# Read from stdin/dump to stdout
if sys.version_info < (3,):
stdin_get_value = sys.stdin.read
else:
stdin_get_value = sys.stdin.buffer.read
source = stdin_get_value()
# After reading, convert to unicode (use the stdout encoding)
source = source.decode(sys.stdout.encoding, 'replace')
as_dict = source_to_dict(source)
print(json.dumps(as_dict, indent=4))
sys.stdout.flush()
else:
# start as server
port = int(sys.argv[1]) # this is from where we want to receive messages.
t = CythonJsonServer(port)
dbg(SERVER_NAME + ' will start')
t.run()
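
The new cython_json.py above speaks a small Content-Length framed JSON protocol: when started with a port it connects out to that port, reads a `Content-Length: N` header line, a blank line and N bytes of JSON (a `cython_to_json_ast` command), and answers with the same framing. A minimal peer sketch follows, assuming you start `python cython_json.py <port>` against the printed port; it is only an illustration of the framing, not part of the commit.

```python
import json
import socket


def send_framed(sock, payload):
    # Frame a request the way cython_json.py expects: Content-Length header,
    # a blank line, then the JSON body.
    body = json.dumps(payload).encode('utf-8')
    sock.sendall(('Content-Length: %d\r\n\r\n' % len(body)).encode('ascii'))
    sock.sendall(body)


def recv_framed(sock):
    # Read until the blank line, parse the length, then read the body.
    buf = b''
    while b'\r\n\r\n' not in buf:
        chunk = sock.recv(1024)
        if not chunk:
            raise EOFError('connection closed before headers were complete')
        buf += chunk
    header, _, body = buf.partition(b'\r\n\r\n')
    length = int(header.split(b':', 1)[1])
    while len(body) < length:
        chunk = sock.recv(length - len(body))
        if not chunk:
            break
        body += chunk
    return json.loads(body[:length].decode('utf-8'))


if __name__ == '__main__':
    # Listen on a free port; cython_json.py connects back to it when started
    # as `python cython_json.py <port>`.
    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    listener.bind(('127.0.0.1', 0))
    listener.listen(1)
    print('now run: python cython_json.py %d' % listener.getsockname()[1])
    conn, _ = listener.accept()
    send_framed(conn, {'command': 'cython_to_json_ast', 'contents': 'x = 10'})
    print(json.dumps(recv_framed(conn), indent=4))
    conn.close()
```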

View file

@@ -0,0 +1,85 @@
import Cython
from cython_json import source_to_dict
import pytest
import json
def test_dump_ast_error():
as_dict = source_to_dict(u"x = [a 10]")
errors = as_dict['errors']
assert len(errors) == 1
error = errors[0]
assert error['__node__'] == 'CompileError'
assert error['line'] == 1
assert error['col'] == 8
assert 'Expected' in error['message_only']
def test_dump_error():
contents = u'''
from distutils import sysconfig
'''
if isinstance(contents, bytes):
contents = contents.decode('utf-8')
source_to_dict(contents)
def test_global():
contents = u'''
def method():
global b
b = 10
'''
if isinstance(contents, bytes):
contents = contents.decode('utf-8')
source_to_dict(contents)
# def test_dump_custom():
# with open(r'X:\cython\tests\compile\buildenv.pyx', 'r') as stream:
# contents = stream.read().decode('utf-8')
# source_to_dict(contents)
def test_dump_ast():
data = source_to_dict(u"x = [a, 10]")
assert not data['errors']
assert data['ast']['stats'] == [
{
"__node__": "SingleAssignment",
"rhs": {
"__node__": "List",
"line": 1,
"args": [
{
"__node__": "Name",
"line": 1,
"col": 5,
"name": "a"
},
{
"is_c_literal": "None",
"unsigned": "",
"value": "10",
"constant_result": "10",
"__node__": "Int",
"line": 1,
"type": "long",
"col": 8,
"longness": ""
}
],
"col": 4
},
"lhs": {
"__node__": "Name",
"line": 1,
"col": 0,
"name": "x"
},
"line": 1,
"col": 4
}
]
if __name__ == '__main__':
pytest.main()

View file

@@ -63,6 +63,7 @@ def test_exceptions_and_exclude_rules(pyfile, target, run, scenario, exc_type):
@pytest.mark.parametrize("scenario", ["exclude_code_to_debug", "exclude_callback_dir"])
def test_exceptions_and_partial_exclude_rules(pyfile, target, run, scenario):
@pyfile
def code_to_debug():
from debug_me import backchannel
@@ -123,9 +124,22 @@ def test_exceptions_and_partial_exclude_rules(pyfile, target, run, scenario):
)
# As exception unwinds the stack, we shouldn't stop at @call_me_back,
# since that line is in the excluded file. Furthermore, although the
# exception is unhandled, we shouldn't get a stop for that, either,
# because the exception is last seen in an excluded file.
# since that line is in the excluded file.
#
# Afterwards, because the exception is unhandled, we'll have an additional stop
# (although the unhandled exception is last seen in an excluded file, we'll
# show it if it has a non-excluded file in the stack).
session.request_continue()
stop = session.wait_for_stop(
"exception",
expected_frames=[
some.dap.frame(
some.dap.source(call_me_back_py),
line=call_me_back_py.lines["callback"],
)
],
)
session.request_continue()
elif scenario == "exclude_callback_dir":