Exclude files from being debugged. #997 (#1119)

Author: Fabio Zadrozny, 2019-01-31 18:27:14 -02:00 (committed by Karthik Nadig)
parent 49721a70e2
commit 4f7abaddd9
32 changed files with 5613 additions and 4864 deletions
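
This commit moves the old PYDEVD_FILTERS / IDE_PROJECT_ROOTS handling out of pydevd_utils.py and into a dedicated FilesFiltering class (new file pydevd_filtering.py, shown further below), which reads its configuration from environment variables. A minimal sketch of setting those variables before pydevd starts, based on the parsing code in this diff; the project path is illustrative only:

import json
import os
import sys

# Only files under these roots are treated as project code (os.pathsep-separated list).
os.environ['IDE_PROJECT_ROOTS'] = '/home/user/myproject'  # illustrative path
# Roots treated as library code; when unset, FilesFiltering falls back to sys.prefix / site-packages defaults.
os.environ['LIBRARY_ROOTS'] = sys.prefix
# The mere presence of this variable enables the libraries filter (skip library frames while stepping).
os.environ['PYDEVD_FILTER_LIBRARIES'] = '1'
# Either a JSON dict mapping glob pattern -> exclude flag, or a ';'-separated list of glob patterns.
os.environ['PYDEVD_FILTERS'] = json.dumps({'**/site-packages/**': True})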

View file

@ -1,6 +1,5 @@
from _pydevd_bundle.pydevd_constants import USE_LIB_COPY, izip
try:
try:
if USE_LIB_COPY:
@ -12,7 +11,6 @@ try:
except ImportError:
from _pydev_imps import _pydev_xmlrpclib as xmlrpclib
try:
try:
if USE_LIB_COPY:
@ -25,28 +23,23 @@ try:
except ImportError:
from _pydev_imps._pydev_SimpleXMLRPCServer import SimpleXMLRPCServer
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
try:
execfile=execfile #Not in Py3k
execfile = execfile # Not in Py3k
except NameError:
from _pydev_imps._pydev_execfile import execfile
try:
if USE_LIB_COPY:
from _pydev_imps._pydev_saved_modules import _queue
else:
import Queue as _queue
except:
import queue as _queue #@UnresolvedImport
import queue as _queue # @UnresolvedImport
try:
from _pydevd_bundle.pydevd_exec import Exec
@ -56,5 +49,5 @@ except:
try:
from urllib import quote, quote_plus, unquote_plus
except:
from urllib.parse import quote, quote_plus, unquote_plus #@UnresolvedImport
from urllib.parse import quote, quote_plus, unquote_plus # @UnresolvedImport

View file

@ -3,23 +3,23 @@ from _pydevd_bundle.pydevd_constants import DebugInfoHolder
from _pydev_imps._pydev_saved_modules import threading
currentThread = threading.currentThread
import traceback
WARN_ONCE_MAP = {}
def stderr_write(message):
sys.stderr.write(message)
sys.stderr.write("\n")
def debug(message):
if DebugInfoHolder.DEBUG_TRACE_LEVEL>2:
if DebugInfoHolder.DEBUG_TRACE_LEVEL > 2:
stderr_write(message)
def warn(message):
if DebugInfoHolder.DEBUG_TRACE_LEVEL>1:
if DebugInfoHolder.DEBUG_TRACE_LEVEL > 1:
stderr_write(message)

View file

@ -1,10 +1,10 @@
from _pydevd_bundle._debug_adapter.pydevd_schema_log import debug_exception
import json
class BaseSchema(object):
def to_json(self):
import json
return json.dumps(self.to_dict())
@ -79,11 +79,13 @@ def from_dict(dct):
def from_json(json_msg):
if isinstance(json_msg, bytes):
json_msg = json_msg.decode('utf-8')
import json
return from_dict(json.loads(json_msg))
def get_response_class(request):
if request.__class__ == dict:
return _responses_to_types[request['command']]
return _responses_to_types[request.command]

View file

@ -49,6 +49,9 @@ class PyDevdAPI(object):
return py_db.cmd_factory.make_version_message(seq)
def send_error_message(self, py_db, msg):
sys.stderr.write('pydevd: %s\n' % (msg,))
def set_show_return_values(self, py_db, show_return_values):
if show_return_values:
py_db.show_return_values = True
@ -449,3 +452,21 @@ class PyDevdAPI(object):
py_db.on_breakpoints_changed(removed=True)
def set_project_roots(self, py_db, project_roots):
'''
:param unicode project_roots:
'''
py_db.set_project_roots(project_roots)
# Add it to the namespace so that it's available as PyDevdAPI.ExcludeFilter
from _pydevd_bundle.pydevd_filtering import ExcludeFilter # noqa
def set_exclude_filters(self, py_db, exclude_filters):
'''
:param list(PyDevdAPI.ExcludeFilter) exclude_filters:
'''
py_db.set_exclude_filters(exclude_filters)
def set_use_libraries_filter(self, py_db, use_libraries_filter):
py_db.set_use_libraries_filter(use_libraries_filter)
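
A hedged sketch of driving the new PyDevdAPI entry points above from client code; it assumes pydevd is already running so that get_global_debugger() returns the active PyDB, and the paths/module names are illustrative:

from _pydevd_bundle.pydevd_api import PyDevdAPI
from _pydevd_bundle.pydevd_constants import get_global_debugger

api = PyDevdAPI()
py_db = get_global_debugger()  # assumed: a debugger session was already started elsewhere

exclude_filters = [
    # ExcludeFilter(name, exclude, is_path): a glob pattern when is_path=True, a module name otherwise.
    PyDevdAPI.ExcludeFilter('**/site-packages/**', True, True),
    PyDevdAPI.ExcludeFilter('django', True, False),
]
api.set_exclude_filters(py_db, exclude_filters)
api.set_use_libraries_filter(py_db, True)
api.set_project_roots(py_db, ['/home/user/myproject'])  # illustrative project root

Both setters end up on PyDB, which clears its filter and skip caches (see the pydevd.py changes further below).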

View file

@ -104,7 +104,6 @@ except:
except:
import io as StringIO
# CMD_XXX constants imported for backward compatibility
from _pydevd_bundle.pydevd_comm_constants import * # @UnusedWildImport
@ -294,7 +293,6 @@ class ReaderThread(PyDBDaemonThread):
self.handle_except()
return # Finished communication.
# Note: the java backend is always expected to pass utf-8 encoded strings. We now work with unicode
# internally and thus, we may need to convert to the actual encoding where needed (i.e.: filenames
# on python 2 may need to be converted to the filesystem encoding).
@ -305,7 +303,6 @@ class ReaderThread(PyDBDaemonThread):
sys.stderr.write(u'debugger: received >>%s<<\n' % (line,))
sys.stderr.flush()
args = line.split(u'\t', 2)
try:
cmd_id = int(args[0])

View file

@ -236,9 +236,6 @@ try:
except:
from io import StringIO
if IS_JYTHON:
def NO_FTRACE(frame, event, arg):

File diff suppressed because it is too large.

View file

@ -187,9 +187,6 @@ DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile')
TRACE_PROPERTY = 'pydevd_traceproperty.py'
#=======================================================================================================================
# PyDBFrame
#=======================================================================================================================
@ -595,7 +592,7 @@ cdef class PyDBFrame:
if can_skip:
if plugin_manager is not None and main_debugger.has_plugin_line_breaks:
can_skip = not plugin_manager.can_not_skip(main_debugger, self, frame)
can_skip = not plugin_manager.can_not_skip(main_debugger, frame)
# CMD_STEP_OVER = 108, CMD_STEP_OVER_MY_CODE = 159
if can_skip and main_debugger.show_return_values and info.pydev_step_cmd in (108, 159) and frame.f_back is info.pydev_step_stop:
@ -708,18 +705,8 @@ cdef class PyDBFrame:
#
# As for lambda, as it only has a single statement, it's not interesting to trace
# its call and later its line event as they're usually in the same line.
return self.trace_dispatch
else:
# if the frame is traced after breakpoint stop,
# but the file should be ignored while stepping because of filters
if step_cmd != -1:
if main_debugger.is_filter_enabled and main_debugger.is_ignored_by_filters(filename):
# ignore files matching stepping filters
return self.trace_dispatch
if main_debugger.is_filter_libraries and not main_debugger.in_project_scope(filename):
# ignore library files while stepping
return self.trace_dispatch
return self.trace_dispatch
if main_debugger.show_return_values:
if is_return and info.pydev_step_cmd in (CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE) and frame.f_back == info.pydev_step_stop:
@ -776,30 +763,23 @@ cdef class PyDBFrame:
if should_skip:
stop = False
elif step_cmd == CMD_STEP_INTO:
elif step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE):
force_check_project_scope = step_cmd == CMD_STEP_INTO_MY_CODE
if is_line:
stop = True
elif is_return:
if frame.f_back is not None:
if main_debugger.get_file_type(
get_abs_path_real_path_and_base_from_frame(frame.f_back)) == main_debugger.PYDEV_FILE:
stop = False
if force_check_project_scope or main_debugger.is_files_filter_enabled:
stop = not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, force_check_project_scope)
else:
stop = True
elif is_return and frame.f_back is not None:
if main_debugger.get_file_type(
get_abs_path_real_path_and_base_from_frame(frame.f_back)) == main_debugger.PYDEV_FILE:
stop = False
else:
if force_check_project_scope or main_debugger.is_files_filter_enabled:
stop = not main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, force_check_project_scope)
else:
stop = True
if plugin_manager is not None:
result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop)
if result:
stop, plugin_stop = result
elif step_cmd == CMD_STEP_INTO_MY_CODE:
if is_line:
if main_debugger.in_project_scope(frame.f_code.co_filename):
stop = True
elif is_return and frame.f_back is not None:
if main_debugger.in_project_scope(frame.f_back.f_code.co_filename):
stop = True
else:
stop = False
if plugin_manager is not None:
result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop)
@ -918,6 +898,8 @@ from _pydev_imps._pydev_saved_modules import threading
from _pydevd_bundle.pydevd_constants import get_current_thread_id, IS_IRONPYTHON, NO_FTRACE
from _pydevd_bundle.pydevd_kill_all_pydevd_threads import kill_all_pydev_threads
from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER
from _pydevd_bundle.pydevd_comm_constants import CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_OVER, \
CMD_STEP_OVER_MY_CODE, CMD_STEP_RETURN, CMD_STEP_RETURN_MY_CODE
# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated)
from cpython.object cimport PyObject
from cpython.ref cimport Py_INCREF, Py_XDECREF
@ -925,7 +907,6 @@ from cpython.ref cimport Py_INCREF, Py_XDECREF
# from _pydevd_bundle.pydevd_frame import PyDBFrame
# ENDIF
# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated)
# cdef dict global_cache_skips
# cdef dict global_cache_frame_skips
@ -1299,9 +1280,22 @@ cdef class ThreadTracer:
# Note: it's important that the context name is also given because we may hit something once
# in the global context and another in the local context.
frame_cache_key = (frame.f_code.co_firstlineno, frame.f_code.co_name, frame.f_code.co_filename)
if not is_stepping and frame_cache_key in cache_skips:
# if DEBUG: print('skipped: trace_dispatch (cache hit)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name)
return None if event == 'call' else NO_FTRACE
if frame_cache_key in cache_skips:
if not is_stepping:
# if DEBUG: print('skipped: trace_dispatch (cache hit)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name)
return None if event == 'call' else NO_FTRACE
else:
# When stepping we can't take into account caching based on the breakpoints (only global filtering).
if cache_skips.get(frame_cache_key) == 1:
back_frame = frame.f_back
if back_frame is not None and pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_RETURN, CMD_STEP_RETURN_MY_CODE):
back_frame_cache_key = (back_frame.f_code.co_firstlineno, back_frame.f_code.co_name, back_frame.f_code.co_filename)
if cache_skips.get(back_frame_cache_key) == 1:
# if DEBUG: print('skipped: trace_dispatch (cache hit: 1)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name)
return None if event == 'call' else NO_FTRACE
else:
# if DEBUG: print('skipped: trace_dispatch (cache hit: 2)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name)
return None if event == 'call' else NO_FTRACE
try:
# Make fast path faster!
@ -1323,13 +1317,20 @@ cdef class ThreadTracer:
cache_skips[frame_cache_key] = 1
return None if event == 'call' else NO_FTRACE
if is_stepping:
if py_db.is_filter_enabled and py_db.is_ignored_by_filters(filename):
# ignore files matching stepping filters
return None if event == 'call' else NO_FTRACE
if py_db.is_filter_libraries and not py_db.in_project_scope(filename):
# ignore library files while stepping
return None if event == 'call' else NO_FTRACE
if py_db.is_files_filter_enabled:
if py_db.apply_files_filter(frame, filename, False):
cache_skips[frame_cache_key] = 1
# A little gotcha, sometimes when we're stepping in we have to stop in a
# return event showing the back frame as the current frame, so, we need
# to check not only the current frame but the back frame too.
back_frame = frame.f_back
if back_frame is not None and pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_RETURN, CMD_STEP_RETURN_MY_CODE):
if py_db.apply_files_filter(back_frame, back_frame.f_code.co_filename, False):
back_frame_cache_key = (back_frame.f_code.co_firstlineno, back_frame.f_code.co_name, back_frame.f_code.co_filename)
cache_skips[back_frame_cache_key] = 1
return None if event == 'call' else NO_FTRACE
else:
return None if event == 'call' else NO_FTRACE
# if DEBUG: print('trace_dispatch', filename, frame.f_lineno, event, frame.f_code.co_name, file_type)
if additional_info.is_tracing:
@ -1343,7 +1344,9 @@ cdef class ThreadTracer:
)
).trace_dispatch(frame, event, arg)
if ret is None:
cache_skips[frame_cache_key] = 1
# 1 means skipped because of filters.
# 2 means skipped because no breakpoints were hit.
cache_skips[frame_cache_key] = 2
return None if event == 'call' else NO_FTRACE
# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated)

View file

@ -83,6 +83,7 @@ DONT_TRACE = {
'pydevd_extension_api.py': PYDEV_FILE,
'pydevd_extension_utils.py': PYDEV_FILE,
'pydevd_file_utils.py': PYDEV_FILE,
'pydevd_filtering.py': PYDEV_FILE,
'pydevd_frame.py': PYDEV_FILE,
'pydevd_frame_eval_cython_wrapper.py': PYDEV_FILE,
'pydevd_frame_eval_main.py': PYDEV_FILE,

View file

@ -0,0 +1,287 @@
import fnmatch
import glob
import os.path
import sys
from _pydev_bundle import pydev_log
import pydevd_file_utils
import json
from collections import namedtuple
try:
xrange # noqa
except NameError:
xrange = range # noqa
ExcludeFilter = namedtuple('ExcludeFilter', 'name, exclude, is_path')
def _convert_to_str_and_clear_empty(roots):
if sys.version_info[0] <= 2:
# In py2 we need bytes for the files.
roots = [
root if not isinstance(root, unicode) else root.encode(sys.getfilesystemencoding())
for root in roots
]
new_roots = []
for root in roots:
assert isinstance(root, str), '%s not str (found: %s)' % (root, type(root))
if root:
new_roots.append(root)
return new_roots
def _check_matches(patterns, paths):
if not patterns and not paths:
# Matched to the end.
return True
if (not patterns and paths) or (patterns and not paths):
return False
pattern = patterns[0]
path = paths[0]
if not glob.has_magic(pattern):
if pattern != path:
return False
elif pattern == '**':
if len(patterns) == 1:
return True # if ** is the last one it matches anything to the right.
for i in xrange(len(paths)):
# Recursively check the remaining patterns as the
# current pattern could match any number of paths.
if _check_matches(patterns[1:], paths[i:]):
return True
elif not fnmatch.fnmatch(path, pattern):
# Current part doesn't match.
return False
return _check_matches(patterns[1:], paths[1:])
def glob_matches_path(path, pattern, sep=os.sep, altsep=os.altsep):
if altsep:
pattern = pattern.replace(altsep, sep)
path = path.replace(altsep, sep)
drive = ''
if len(path) > 1 and path[1] == ':':
drive, path = path[0], path[2:]
if drive and len(pattern) > 1:
if pattern[1] == ':':
if drive.lower() != pattern[0].lower():
return False
pattern = pattern[2:]
patterns = pattern.split(sep)
paths = path.split(sep)
if paths:
if paths[0] == '':
paths = paths[1:]
if patterns:
if patterns[0] == '':
patterns = patterns[1:]
return _check_matches(patterns, paths)
class FilesFiltering(object):
'''
Note: calls to FilesFiltering are uncached.
The actual API used should be through PyDB.
'''
def __init__(self):
self._exclude_filters = []
self._project_roots = []
self._library_roots = []
# Filter out libraries?
self._use_libraries_filter = False
self.require_module = False # True if some exclude filter filters by the module.
self.set_use_libraries_filter(os.getenv('PYDEVD_FILTER_LIBRARIES') is not None)
project_roots = os.getenv('IDE_PROJECT_ROOTS', None)
if project_roots is not None:
project_roots = project_roots.split(os.pathsep)
else:
project_roots = []
self.set_project_roots(project_roots)
library_roots = os.getenv('LIBRARY_ROOTS', None)
if library_roots is not None:
library_roots = library_roots.split(os.pathsep)
else:
library_roots = self._get_default_library_roots()
self.set_library_roots(library_roots)
# Stepping filters.
pydevd_filters = os.getenv('PYDEVD_FILTERS', '')
if pydevd_filters.startswith('{'):
# dict(glob_pattern (str) -> exclude(True or False))
exclude_filters = []
for key, val in json.loads(pydevd_filters).items():
exclude_filters.append(ExcludeFilter(key, val, True))
self._exclude_filters = exclude_filters
else:
# A ';' separated list of strings with globs for the
# list of excludes.
filters = pydevd_filters.split(';')
pydev_log.debug("PYDEVD_FILTERS %s\n" % filters)
new_filters = []
for new_filter in filters:
if new_filter.strip():
new_filters.append(ExcludeFilter(new_filter.strip(), True, True))
self._exclude_filters = new_filters
@classmethod
def _get_default_library_roots(cls):
# Provide sensible defaults if not in env vars.
import site
roots = [sys.prefix]
if hasattr(sys, 'base_prefix'):
roots.append(sys.base_prefix)
if hasattr(sys, 'real_prefix'):
roots.append(sys.real_prefix)
if hasattr(site, 'getusersitepackages'):
site_paths = site.getusersitepackages()
if isinstance(site_paths, (list, tuple)):
for site_path in site_paths:
roots.append(site_path)
else:
roots.append(site_paths)
if hasattr(site, 'getsitepackages'):
site_paths = site.getsitepackages()
if isinstance(site_paths, (list, tuple)):
for site_path in site_paths:
roots.append(site_path)
else:
roots.append(site_paths)
for path in sys.path:
if os.path.exists(path) and os.path.basename(path) == 'site-packages':
roots.append(path)
return sorted(set(roots))
def _normpath(self, filename):
return pydevd_file_utils.get_abs_path_real_path_and_base_from_file(filename)[0]
def _fix_roots(self, roots):
roots = _convert_to_str_and_clear_empty(roots)
new_roots = []
for root in roots:
new_roots.append(self._normpath(root))
return new_roots
def set_project_roots(self, project_roots):
self._project_roots = self._fix_roots(project_roots)
pydev_log.debug("IDE_PROJECT_ROOTS %s\n" % project_roots)
def _get_project_roots(self):
return self._project_roots
def set_library_roots(self, roots):
self._library_roots = self._fix_roots(roots)
pydev_log.debug("LIBRARY_ROOTS %s\n" % roots)
def _get_library_roots(self):
return self._library_roots
def in_project_roots(self, filename):
'''
Note: don't call directly. Use PyDb.in_project_scope (no caching here).
'''
project_roots = self._get_project_roots()
if not filename.endswith('>'):
filename = self._normpath(filename)
found_in_project = []
for root in project_roots:
if root and filename.startswith(root):
found_in_project.append(root)
found_in_library = []
library_roots = self._get_library_roots()
for root in library_roots:
if root and filename.startswith(root):
found_in_library.append(root)
if not project_roots:
# If we have no project roots configured, consider it being in the project
# roots if it's not found in site-packages (because we have defaults for those
# and not the other way around).
if filename.endswith('>'):
if filename.endswith('<string>'):
# When `python -c <code>` is used, the filename
# endswith <string>.
in_project = True
else:
in_project = False
else:
in_project = not found_in_library
else:
in_project = False
if found_in_project:
if not found_in_library:
in_project = True
else:
# Found in both, let's see which one has the bigger path matched.
if max(len(x) for x in found_in_project) > max(len(x) for x in found_in_library):
in_project = True
return in_project
def use_libraries_filter(self):
'''
Should we debug only what's inside project folders?
'''
return self._use_libraries_filter
def set_use_libraries_filter(self, use):
pydev_log.debug("pydevd: Use libraries filter: %s\n" % use)
self._use_libraries_filter = use
def use_exclude_filters(self):
# Enabled if we have any filters registered.
return len(self._exclude_filters) > 0
def exclude_by_filter(self, filename, module_name):
'''
:return: True if it should be excluded, False if it should be included and None
if no rule matched the given file.
'''
for exclude_filter in self._exclude_filters: # : :type exclude_filter: ExcludeFilter
if exclude_filter.is_path:
if glob_matches_path(filename, exclude_filter.name):
if exclude_filter.exclude:
pydev_log.debug("File %s ignored by filter %s" % (filename, exclude_filter.name))
return exclude_filter.exclude
else:
# Module filter.
if exclude_filter.name == module_name or module_name.startswith(exclude_filter.name + '.'):
return exclude_filter.exclude
return None
def set_exclude_filters(self, exclude_filters):
'''
:param list(ExcludeFilter) exclude_filters:
'''
self._exclude_filters = exclude_filters
self.require_module = False
for exclude_filter in exclude_filters:
if not exclude_filter.is_path:
self.require_module = True
break
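
A short usage sketch for the new module above, exercising glob_matches_path and the True/False/None contract of exclude_by_filter; the file and module names are made up for illustration:

from _pydevd_bundle.pydevd_filtering import ExcludeFilter, FilesFiltering, glob_matches_path

# '**' spans any number of path components; a trailing '**' matches everything below a directory.
assert glob_matches_path('/project/sub/module.py', '/project/**', sep='/', altsep=None)
assert not glob_matches_path('/other/module.py', '/project/**', sep='/', altsep=None)

files_filtering = FilesFiltering()  # picks up PYDEVD_FILTERS / *_ROOTS from the environment
files_filtering.set_exclude_filters([
    ExcludeFilter('**/site-packages/**', True, True),   # exclude by path glob
    ExcludeFilter('myproject.generated', True, False),  # exclude by module name (and submodules)
])
# exclude_by_filter: True -> excluded, False -> explicitly included, None -> no rule matched.
print(files_filtering.exclude_by_filter('/venv/lib/site-packages/foo.py', 'foo'))        # True
print(files_filtering.exclude_by_filter('/src/myproject/generated/models.py',
                                        'myproject.generated.models'))                   # True
print(files_filtering.exclude_by_filter('/src/myproject/app.py', 'myproject.app'))       # None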

View file

@ -36,9 +36,6 @@ DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile')
TRACE_PROPERTY = 'pydevd_traceproperty.py'
#=======================================================================================================================
# PyDBFrame
#=======================================================================================================================
@ -444,7 +441,7 @@ class PyDBFrame:
if can_skip:
if plugin_manager is not None and main_debugger.has_plugin_line_breaks:
can_skip = not plugin_manager.can_not_skip(main_debugger, self, frame)
can_skip = not plugin_manager.can_not_skip(main_debugger, frame)
# CMD_STEP_OVER = 108, CMD_STEP_OVER_MY_CODE = 159
if can_skip and main_debugger.show_return_values and info.pydev_step_cmd in (108, 159) and frame.f_back is info.pydev_step_stop:
@ -557,18 +554,8 @@ class PyDBFrame:
#
# As for lambda, as it only has a single statement, it's not interesting to trace
# its call and later its line event as they're usually in the same line.
return self.trace_dispatch
else:
# if the frame is traced after breakpoint stop,
# but the file should be ignored while stepping because of filters
if step_cmd != -1:
if main_debugger.is_filter_enabled and main_debugger.is_ignored_by_filters(filename):
# ignore files matching stepping filters
return self.trace_dispatch
if main_debugger.is_filter_libraries and not main_debugger.in_project_scope(filename):
# ignore library files while stepping
return self.trace_dispatch
return self.trace_dispatch
if main_debugger.show_return_values:
if is_return and info.pydev_step_cmd in (CMD_STEP_OVER, CMD_STEP_OVER_MY_CODE) and frame.f_back == info.pydev_step_stop:
@ -625,30 +612,23 @@ class PyDBFrame:
if should_skip:
stop = False
elif step_cmd == CMD_STEP_INTO:
elif step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE):
force_check_project_scope = step_cmd == CMD_STEP_INTO_MY_CODE
if is_line:
stop = True
elif is_return:
if frame.f_back is not None:
if main_debugger.get_file_type(
get_abs_path_real_path_and_base_from_frame(frame.f_back)) == main_debugger.PYDEV_FILE:
stop = False
if force_check_project_scope or main_debugger.is_files_filter_enabled:
stop = not main_debugger.apply_files_filter(frame, frame.f_code.co_filename, force_check_project_scope)
else:
stop = True
elif is_return and frame.f_back is not None:
if main_debugger.get_file_type(
get_abs_path_real_path_and_base_from_frame(frame.f_back)) == main_debugger.PYDEV_FILE:
stop = False
else:
if force_check_project_scope or main_debugger.is_files_filter_enabled:
stop = not main_debugger.apply_files_filter(frame.f_back, frame.f_back.f_code.co_filename, force_check_project_scope)
else:
stop = True
if plugin_manager is not None:
result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop)
if result:
stop, plugin_stop = result
elif step_cmd == CMD_STEP_INTO_MY_CODE:
if is_line:
if main_debugger.in_project_scope(frame.f_code.co_filename):
stop = True
elif is_return and frame.f_back is not None:
if main_debugger.in_project_scope(frame.f_back.f_code.co_filename):
stop = True
else:
stop = False
if plugin_manager is not None:
result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop)

View file

@ -43,7 +43,6 @@ class _PyDevCommandProcessor(object):
py_db._main_lock.acquire()
try:
cmd = on_command(py_db, cmd_id, seq, text)
if cmd is not None:
py_db.writer.add_command(cmd)
@ -558,7 +557,7 @@ class _PyDevCommandProcessor(object):
thread_id, internal_get_next_statement_targets, seq, thread_id, frame_id)
def cmd_set_project_roots(self, py_db, cmd_id, seq, text):
pydevd_utils.set_project_roots(text.split(u'\t'))
self.api.set_project_roots(py_db, text.split(u'\t'))
def cmd_thread_dump_to_stderr(self, py_db, cmd_id, seq, text):
pydevd_utils.dump_threads()

View file

@ -1,14 +1,76 @@
from functools import reduce, partial
import itertools
import json
import os
import re
from _pydevd_bundle._debug_adapter import pydevd_base_schema
from _pydevd_bundle._debug_adapter.pydevd_schema import SourceBreakpoint
from _pydevd_bundle.pydevd_api import PyDevdAPI
from _pydevd_bundle.pydevd_comm_constants import CMD_RETURN
from _pydevd_bundle.pydevd_net_command import NetCommand
from _pydevd_bundle._debug_adapter.pydevd_schema import SourceBreakpoint
import itertools
from _pydevd_bundle.pydevd_filtering import ExcludeFilter
from _pydevd_bundle.pydevd_json_debug_options import _extract_debug_options
from _pydevd_bundle.pydevd_net_command import NetCommand
def _convert_rules_to_exclude_filters(rules, filename_to_server, on_error):
exclude_filters = []
if not isinstance(rules, list):
on_error('Invalid "rules" (expected list of dicts). Found: %s' % (rules,))
else:
directory_exclude_filters = []
module_exclude_filters = []
glob_exclude_filters = []
for rule in rules:
if not isinstance(rule, dict):
on_error('Invalid "rules" (expected list of dicts). Found: %s' % (rules,))
continue
include = rule.get('include')
if include is None:
on_error('Invalid "rule" (expected dict with "include"). Found: %s' % (rule,))
continue
path = rule.get('path')
module = rule.get('module')
if path is None and module is None:
on_error('Invalid "rule" (expected dict with "path" or "module"). Found: %s' % (rule,))
continue
if path is not None:
glob_pattern = path
if '*' not in path and '?' not in path:
path = filename_to_server(path)
if os.path.isdir(glob_pattern):
# If a directory was specified, add a '/**'
# to be consistent with the glob pattern required
# by pydevd.
if not glob_pattern.endswith('/') and not glob_pattern.endswith('\\'):
glob_pattern += '/'
glob_pattern += '**'
directory_exclude_filters.append(ExcludeFilter(glob_pattern, not include, True))
else:
glob_exclude_filters.append(ExcludeFilter(glob_pattern, not include, True))
elif module is not None:
module_exclude_filters.append(ExcludeFilter(module, not include, False))
else:
on_error('Internal error: expected path or module to be specified.')
# Note that we have to sort the directory/module exclude filters so that the biggest
# paths match first.
# i.e.: if we have:
# /sub1/sub2/sub3
# a rule with /sub1/sub2 would match before a rule only with /sub1.
directory_exclude_filters = sorted(directory_exclude_filters, key=lambda exclude_filter:-len(exclude_filter.name))
module_exclude_filters = sorted(module_exclude_filters, key=lambda exclude_filter:-len(exclude_filter.name))
exclude_filters = directory_exclude_filters + glob_exclude_filters + module_exclude_filters
return exclude_filters
class _PyDevJsonCommandProcessor(object):
@ -16,7 +78,7 @@ class _PyDevJsonCommandProcessor(object):
def __init__(self, from_json):
self.from_json = from_json
self.api = PyDevdAPI()
self.debug_options = {}
self._debug_options = {}
self._next_breakpoint_id = partial(next, itertools.count(0))
def process_net_command_json(self, py_db, json_contents):
@ -85,17 +147,28 @@ class _PyDevJsonCommandProcessor(object):
self.api.request_completions(py_db, seq, thread_id, frame_id, text, line=line, column=column)
def _set_debug_options(self, args):
self.debug_options = _extract_debug_options(
def _set_debug_options(self, py_db, args):
rules = args.get('rules')
exclude_filters = []
if rules is not None:
exclude_filters = _convert_rules_to_exclude_filters(
rules, self.api.filename_to_server, lambda msg:self.api.send_error_message(py_db, msg))
self.api.set_exclude_filters(py_db, exclude_filters)
self._debug_options = _extract_debug_options(
args.get('options'),
args.get('debugOptions'),
)
debug_stdlib = self._debug_options.get('DEBUG_STDLIB', False)
self.api.set_use_libraries_filter(py_db, not debug_stdlib)
def on_launch_request(self, py_db, request):
'''
:param LaunchRequest request:
'''
self._set_debug_options(request.arguments.kwargs)
self._set_debug_options(py_db, request.arguments.kwargs)
response = pydevd_base_schema.build_response(request)
return NetCommand(CMD_RETURN, 0, response.to_dict(), is_json=True)
@ -103,7 +176,7 @@ class _PyDevJsonCommandProcessor(object):
'''
:param AttachRequest request:
'''
self._set_debug_options(request.arguments.kwargs)
self._set_debug_options(py_db, request.arguments.kwargs)
response = pydevd_base_schema.build_response(request)
return NetCommand(CMD_RETURN, 0, response.to_dict(), is_json=True)
@ -161,9 +234,9 @@ class _PyDevJsonCommandProcessor(object):
suspend_policy = 'ALL'
if not filename.lower().endswith('.py'):
if self.debug_options.get('DJANGO_DEBUG', False):
if self._debug_options.get('DJANGO_DEBUG', False):
btype = 'django-line'
elif self.debug_options.get('FLASK_DEBUG', False):
elif self._debug_options.get('FLASK_DEBUG', False):
btype = 'jinja2-line'
breakpoints_set = []
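
A hedged sketch of the 'rules' format handled by _convert_rules_to_exclude_filters above, as it would arrive in a launch/attach request. The import path is an assumption (the file name is not shown in this extract) and the rule values are illustrative:

# Module path is an assumption; the function is the one defined in this file.
from _pydevd_bundle.pydevd_process_net_command_json import _convert_rules_to_exclude_filters

rules = [
    {'path': '**/site-packages/**', 'include': False},  # glob rule: exclude installed libraries
    {'module': 'django', 'include': False},             # module rule: exclude django and its submodules
]
exclude_filters = _convert_rules_to_exclude_filters(
    rules,
    lambda filename: filename,         # filename_to_server: identity here; real code maps client paths
    lambda msg: print('error:', msg),  # on_error callback
)
for exclude_filter in exclude_filters:
    print(exclude_filter.name, exclude_filter.exclude, exclude_filter.is_path)
# -> '**/site-packages/**' True True, then 'django' True False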

View file

@ -4,17 +4,17 @@ try:
except ImportError:
pass
else:
trace._warn = lambda *args: None # workaround for http://bugs.python.org/issue17143 (PY-8706)
trace._warn = lambda *args: None # workaround for http://bugs.python.org/issue17143 (PY-8706)
import os
from _pydevd_bundle.pydevd_comm import CMD_SIGNATURE_CALL_TRACE, NetCommand
from _pydevd_bundle import pydevd_xml
from _pydevd_bundle.pydevd_constants import xrange, dict_iter_items
from _pydevd_bundle import pydevd_utils
from _pydevd_bundle.pydevd_utils import get_clsname_for_code
class Signature(object):
def __init__(self, file, name):
self.file = file
self.name = name
@ -24,7 +24,7 @@ class Signature(object):
def add_arg(self, name, type):
self.args.append((name, type))
self.args_str.append("%s:%s"%(name, type))
self.args_str.append("%s:%s" % (name, type))
def set_args(self, frame, recursive=False):
self.args = []
@ -39,7 +39,7 @@ class Signature(object):
self.add_arg(name, class_name)
def __str__(self):
return "%s %s(%s)"%(self.file, self.name, ", ".join(self.args_str))
return "%s %s(%s)" % (self.file, self.name, ", ".join(self.args_str))
def get_type_of_value(value, ignore_module_name=('__main__', '__builtin__', 'builtins'), recursive=False):
@ -50,7 +50,7 @@ def get_type_of_value(value, ignore_module_name=('__main__', '__builtin__', 'bui
class_name = tp.__name__
if hasattr(tp, '__module__') and tp.__module__ and tp.__module__ not in ignore_module_name:
class_name = "%s.%s"%(tp.__module__, class_name)
class_name = "%s.%s" % (tp.__module__, class_name)
if class_name == 'list':
class_name = 'List'
@ -62,7 +62,7 @@ def get_type_of_value(value, ignore_module_name=('__main__', '__builtin__', 'bui
class_name = 'Dict'
if len(value) > 0 and recursive:
for (k, v) in dict_iter_items(value):
class_name += '[%s, %s]' % (get_type_of_value(k, recursive=recursive),
class_name += '[%s, %s]' % (get_type_of_value(k, recursive=recursive),
get_type_of_value(v, recursive=recursive))
break
return class_name
@ -85,13 +85,11 @@ def _modname(path):
class SignatureFactory(object):
def __init__(self):
self._caller_cache = {}
self.cache = CallSignatureCache()
def is_in_scope(self, filename):
return pydevd_utils.in_project_roots(filename)
def create_signature(self, frame, filename, with_args=True):
try:
_, modulename, funcname = self.file_module_function_of(frame)
@ -103,8 +101,7 @@ class SignatureFactory(object):
import traceback
traceback.print_exc()
def file_module_function_of(self, frame): #this code is take from trace module and fixed to work with new-style classes
def file_module_function_of(self, frame): # this code is take from trace module and fixed to work with new-style classes
code = frame.f_code
filename = code.co_filename
if filename:
@ -143,6 +140,7 @@ def get_frame_info(frame):
class CallSignatureCache(object):
def __init__(self):
self.cache = {}
@ -166,7 +164,7 @@ def create_signature_message(signature):
for arg in signature.args:
cmdTextList.append('<arg name="%s" type="%s"></arg>' % (pydevd_xml.make_valid_xml_value(arg[0]), pydevd_xml.make_valid_xml_value(arg[1])))
if signature.return_type is not None:
cmdTextList.append('<return type="%s"></return>' % (pydevd_xml.make_valid_xml_value(signature.return_type)))
@ -176,7 +174,7 @@ def create_signature_message(signature):
def send_signature_call_trace(dbg, frame, filename):
if dbg.signature_factory and dbg.signature_factory.is_in_scope(filename):
if dbg.signature_factory and dbg.in_project_scope(filename):
signature = dbg.signature_factory.create_signature(frame, filename)
if signature is not None:
if dbg.signature_factory.cache is not None:
@ -194,7 +192,7 @@ def send_signature_call_trace(dbg, frame, filename):
def send_signature_return_trace(dbg, frame, filename, return_value):
if dbg.signature_factory and dbg.signature_factory.is_in_scope(filename):
if dbg.signature_factory and dbg.in_project_scope(filename):
signature = dbg.signature_factory.create_signature(frame, filename, with_args=False)
signature.return_type = get_type_of_value(return_value, recursive=True)
dbg.writer.add_command(create_signature_message(signature))
@ -202,5 +200,3 @@ def send_signature_return_trace(dbg, frame, filename, return_value):
return False

View file

@ -1,41 +1,54 @@
def add_line_breakpoint(plugin, pydb, type, file, line, condition, expression, func_name):
return None
def add_exception_breakpoint(plugin, pydb, type, exception):
return False
def remove_exception_breakpoint(plugin, pydb, type, exception):
return False
def get_breakpoints(plugin, pydb):
return None
def can_not_skip(plugin, pydb, pydb_frame, frame):
def can_not_skip(plugin, pydb, frame):
return False
def has_exception_breaks(plugin):
return False
def has_line_breaks(plugin):
return False
def cmd_step_into(plugin, pydb, frame, event, args, stop_info, stop):
return False
def cmd_step_over(plugin, pydb, frame, event, args, stop_info, stop):
return False
def stop(plugin, pydb, frame, event, args, stop_info, arg, step_cmd):
return False
def get_breakpoint(plugin, pydb, pydb_frame, frame, event, args):
return None
def suspend(plugin, pydb, thread, frame):
return None
def exception_break(plugin, pydb, pydb_frame, frame, args, arg):
return None
def change_variable(plugin, frame, attr, expression):
return False

View file

@ -5,6 +5,8 @@ from _pydev_imps._pydev_saved_modules import threading
from _pydevd_bundle.pydevd_constants import get_current_thread_id, IS_IRONPYTHON, NO_FTRACE
from _pydevd_bundle.pydevd_kill_all_pydevd_threads import kill_all_pydev_threads
from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER
from _pydevd_bundle.pydevd_comm_constants import CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_OVER, \
CMD_STEP_OVER_MY_CODE, CMD_STEP_RETURN, CMD_STEP_RETURN_MY_CODE
# IFDEF CYTHON
# from cpython.object cimport PyObject
# from cpython.ref cimport Py_INCREF, Py_XDECREF
@ -12,7 +14,6 @@ from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_P
from _pydevd_bundle.pydevd_frame import PyDBFrame
# ENDIF
# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated)
# cdef dict global_cache_skips
# cdef dict global_cache_frame_skips
@ -388,9 +389,22 @@ class ThreadTracer(object):
# Note: it's important that the context name is also given because we may hit something once
# in the global context and another in the local context.
frame_cache_key = (frame.f_code.co_firstlineno, frame.f_code.co_name, frame.f_code.co_filename)
if not is_stepping and frame_cache_key in cache_skips:
# if DEBUG: print('skipped: trace_dispatch (cache hit)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name)
return None if event == 'call' else NO_FTRACE
if frame_cache_key in cache_skips:
if not is_stepping:
# if DEBUG: print('skipped: trace_dispatch (cache hit)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name)
return None if event == 'call' else NO_FTRACE
else:
# When stepping we can't take into account caching based on the breakpoints (only global filtering).
if cache_skips.get(frame_cache_key) == 1:
back_frame = frame.f_back
if back_frame is not None and pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_RETURN, CMD_STEP_RETURN_MY_CODE):
back_frame_cache_key = (back_frame.f_code.co_firstlineno, back_frame.f_code.co_name, back_frame.f_code.co_filename)
if cache_skips.get(back_frame_cache_key) == 1:
# if DEBUG: print('skipped: trace_dispatch (cache hit: 1)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name)
return None if event == 'call' else NO_FTRACE
else:
# if DEBUG: print('skipped: trace_dispatch (cache hit: 2)', frame_cache_key, frame.f_lineno, event, frame.f_code.co_name)
return None if event == 'call' else NO_FTRACE
try:
# Make fast path faster!
@ -412,13 +426,20 @@ class ThreadTracer(object):
cache_skips[frame_cache_key] = 1
return None if event == 'call' else NO_FTRACE
if is_stepping:
if py_db.is_filter_enabled and py_db.is_ignored_by_filters(filename):
# ignore files matching stepping filters
return None if event == 'call' else NO_FTRACE
if py_db.is_filter_libraries and not py_db.in_project_scope(filename):
# ignore library files while stepping
return None if event == 'call' else NO_FTRACE
if py_db.is_files_filter_enabled:
if py_db.apply_files_filter(frame, filename, False):
cache_skips[frame_cache_key] = 1
# A little gotcha, sometimes when we're stepping in we have to stop in a
# return event showing the back frame as the current frame, so, we need
# to check not only the current frame but the back frame too.
back_frame = frame.f_back
if back_frame is not None and pydev_step_cmd in (CMD_STEP_INTO, CMD_STEP_INTO_MY_CODE, CMD_STEP_RETURN, CMD_STEP_RETURN_MY_CODE):
if py_db.apply_files_filter(back_frame, back_frame.f_code.co_filename, False):
back_frame_cache_key = (back_frame.f_code.co_firstlineno, back_frame.f_code.co_name, back_frame.f_code.co_filename)
cache_skips[back_frame_cache_key] = 1
return None if event == 'call' else NO_FTRACE
else:
return None if event == 'call' else NO_FTRACE
# if DEBUG: print('trace_dispatch', filename, frame.f_lineno, event, frame.f_code.co_name, file_type)
if additional_info.is_tracing:
@ -432,7 +453,9 @@ class ThreadTracer(object):
)
).trace_dispatch(frame, event, arg)
if ret is None:
cache_skips[frame_cache_key] = 1
# 1 means skipped because of filters.
# 2 means skipped because no breakpoints were hit.
cache_skips[frame_cache_key] = 2
return None if event == 'call' else NO_FTRACE
# IFDEF CYTHON

View file

@ -1,8 +1,6 @@
from __future__ import nested_scopes
import traceback
import os
import warnings
import pydevd_file_utils
try:
from urllib import quote
@ -10,16 +8,11 @@ except:
from urllib.parse import quote # @UnresolvedImport
import inspect
from _pydevd_bundle.pydevd_constants import IS_PY3K, get_global_debugger
import sys
from _pydev_bundle import pydev_log
from _pydevd_bundle.pydevd_constants import IS_PY3K
from _pydev_imps._pydev_saved_modules import threading
def _normpath(filename):
return pydevd_file_utils.get_abs_path_real_path_and_base_from_file(filename)[0]
def save_main_module(file, module_name):
# patch provided by: Scott Schlesier - when script is run, it does not
# use globals from pydevd:
@ -144,195 +137,6 @@ def get_clsname_for_code(code, frame):
return clsname
_PROJECT_ROOTS_CACHE = []
_LIBRARY_ROOTS_CACHE = []
_FILENAME_TO_IN_SCOPE_CACHE = {}
def _convert_to_str_and_clear_empty(roots):
if sys.version_info[0] <= 2:
# In py2 we need bytes for the files.
roots = [
root if not isinstance(root, unicode) else root.encode(sys.getfilesystemencoding())
for root in roots
]
new_roots = []
for root in roots:
assert isinstance(root, str), '%s not str (found: %s)' % (root, type(root))
if root:
new_roots.append(root)
return new_roots
def _clear_caches_related_to_scope_changes():
# Clear related caches.
_FILENAME_TO_IN_SCOPE_CACHE.clear()
debugger = get_global_debugger()
if debugger is not None:
debugger.clear_skip_caches()
def _set_roots(roots, cache):
roots = _convert_to_str_and_clear_empty(roots)
new_roots = []
for root in roots:
new_roots.append(_normpath(root))
cache.append(new_roots)
# Leave only the last one added.
del cache[:-1]
_clear_caches_related_to_scope_changes()
return new_roots
def _get_roots(cache, env_var, set_when_not_cached, get_default_val=None):
if not cache:
roots = os.getenv(env_var, None)
if roots is not None:
roots = roots.split(os.pathsep)
else:
if not get_default_val:
roots = []
else:
roots = get_default_val()
if not roots:
pydev_log.warn('%s being set to empty list.' % (env_var,))
set_when_not_cached(roots)
return cache[-1] # returns the roots with case normalized
def _get_default_library_roots():
# Provide sensible defaults if not in env vars.
import site
roots = [sys.prefix]
if hasattr(sys, 'base_prefix'):
roots.append(sys.base_prefix)
if hasattr(sys, 'real_prefix'):
roots.append(sys.real_prefix)
if hasattr(site, 'getusersitepackages'):
site_paths = site.getusersitepackages()
if isinstance(site_paths, (list, tuple)):
for site_path in site_paths:
roots.append(site_path)
else:
roots.append(site_paths)
if hasattr(site, 'getsitepackages'):
site_paths = site.getsitepackages()
if isinstance(site_paths, (list, tuple)):
for site_path in site_paths:
roots.append(site_path)
else:
roots.append(site_paths)
for path in sys.path:
if os.path.exists(path) and os.path.basename(path) == 'site-packages':
roots.append(path)
return sorted(set(roots))
# --- Project roots
def set_project_roots(project_roots):
project_roots = _set_roots(project_roots, _PROJECT_ROOTS_CACHE)
pydev_log.debug("IDE_PROJECT_ROOTS %s\n" % project_roots)
def _get_project_roots(project_roots_cache=_PROJECT_ROOTS_CACHE):
return _get_roots(project_roots_cache, 'IDE_PROJECT_ROOTS', set_project_roots)
# --- Library roots
def set_library_roots(roots):
roots = _set_roots(roots, _LIBRARY_ROOTS_CACHE)
pydev_log.debug("LIBRARY_ROOTS %s\n" % roots)
def _get_library_roots(library_roots_cache=_LIBRARY_ROOTS_CACHE):
return _get_roots(library_roots_cache, 'LIBRARY_ROOTS', set_library_roots, _get_default_library_roots)
def in_project_roots(filename, filename_to_in_scope_cache=_FILENAME_TO_IN_SCOPE_CACHE):
# Note: the filename_to_in_scope_cache is the same instance among the many calls to the method
try:
return filename_to_in_scope_cache[filename]
except:
project_roots = _get_project_roots()
original_filename = filename
if not filename.endswith('>'):
filename = _normpath(filename)
found_in_project = []
for root in project_roots:
if root and filename.startswith(root):
found_in_project.append(root)
found_in_library = []
library_roots = _get_library_roots()
for root in library_roots:
if root and filename.startswith(root):
found_in_library.append(root)
if not project_roots:
# If we have no project roots configured, consider it being in the project
# roots if it's not found in site-packages (because we have defaults for those
# and not the other way around).
if filename.endswith('>'):
in_project = False
else:
in_project = not found_in_library
else:
in_project = False
if found_in_project:
if not found_in_library:
in_project = True
else:
# Found in both, let's see which one has the bigger path matched.
if max(len(x) for x in found_in_project) > max(len(x) for x in found_in_library):
in_project = True
filename_to_in_scope_cache[original_filename] = in_project
return in_project
def is_filter_enabled():
return os.getenv('PYDEVD_FILTERS') is not None
def is_filter_libraries():
is_filter = os.getenv('PYDEVD_FILTER_LIBRARIES') is not None
pydev_log.debug("PYDEVD_FILTER_LIBRARIES %s\n" % is_filter)
return is_filter
def _get_stepping_filters(filters_cache=[]):
if not filters_cache:
filters = os.getenv('PYDEVD_FILTERS', '').split(';')
pydev_log.debug("PYDEVD_FILTERS %s\n" % filters)
new_filters = []
for new_filter in filters:
new_filters.append(new_filter)
filters_cache.append(new_filters)
return filters_cache[-1]
def is_ignored_by_filter(filename, filename_to_ignored_by_filters_cache={}):
try:
return filename_to_ignored_by_filters_cache[filename]
except:
import fnmatch
for stepping_filter in _get_stepping_filters():
if fnmatch.fnmatch(filename, stepping_filter):
pydev_log.debug("File %s ignored by filter %s" % (filename, stepping_filter))
filename_to_ignored_by_filters_cache[filename] = True
break
else:
filename_to_ignored_by_filters_cache[filename] = False
return filename_to_ignored_by_filters_cache[filename]
def get_non_pydevd_threads():
threads = threading.enumerate()
return [t for t in threads if t and not getattr(t, 'is_pydev_daemon_thread', False)]

View file

@ -318,7 +318,6 @@ def change_attr_expression(thread_id, frame_id, attr, expression, dbg, value=SEN
Exec('%s=%s' % (attr, expression), frame.f_globals, frame.f_locals)
return result
except Exception:
traceback.print_exc()
@ -452,7 +451,6 @@ def array_to_meta_xml(array, name, format):
return array, xml, rows, cols, format
def dataframe_to_xml(df, name, roffset, coffset, rows, cols, format):
"""
:type df: pandas.core.frame.DataFrame

View file

@ -6747,7 +6747,7 @@ static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_bytec
* if not func_code_info.always_skip_code:
*
* if main_debugger.has_plugin_line_breaks: # <<<<<<<<<<<<<<
* can_skip = not main_debugger.plugin.can_not_skip(main_debugger, None, <object> frame_obj)
* can_skip = not main_debugger.plugin.can_not_skip(main_debugger, <object> frame_obj)
*
*/
__pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_has_plugin_line_breaks); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 319, __pyx_L23_error)
@ -6759,7 +6759,7 @@ static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_bytec
/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":320
*
* if main_debugger.has_plugin_line_breaks:
* can_skip = not main_debugger.plugin.can_not_skip(main_debugger, None, <object> frame_obj) # <<<<<<<<<<<<<<
* can_skip = not main_debugger.plugin.can_not_skip(main_debugger, <object> frame_obj) # <<<<<<<<<<<<<<
*
* if not can_skip:
*/
@ -6782,22 +6782,22 @@ static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_bytec
}
#if CYTHON_FAST_PYCALL
if (PyFunction_Check(__pyx_t_9)) {
PyObject *__pyx_temp[4] = {__pyx_t_2, __pyx_v_main_debugger, Py_None, ((PyObject *)__pyx_v_frame_obj)};
__pyx_t_10 = __Pyx_PyFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_11, 3+__pyx_t_11); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 320, __pyx_L23_error)
PyObject *__pyx_temp[3] = {__pyx_t_2, __pyx_v_main_debugger, ((PyObject *)__pyx_v_frame_obj)};
__pyx_t_10 = __Pyx_PyFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 320, __pyx_L23_error)
__Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
__Pyx_GOTREF(__pyx_t_10);
} else
#endif
#if CYTHON_FAST_PYCCALL
if (__Pyx_PyFastCFunction_Check(__pyx_t_9)) {
PyObject *__pyx_temp[4] = {__pyx_t_2, __pyx_v_main_debugger, Py_None, ((PyObject *)__pyx_v_frame_obj)};
__pyx_t_10 = __Pyx_PyCFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_11, 3+__pyx_t_11); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 320, __pyx_L23_error)
PyObject *__pyx_temp[3] = {__pyx_t_2, __pyx_v_main_debugger, ((PyObject *)__pyx_v_frame_obj)};
__pyx_t_10 = __Pyx_PyCFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_11, 2+__pyx_t_11); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 320, __pyx_L23_error)
__Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
__Pyx_GOTREF(__pyx_t_10);
} else
#endif
{
__pyx_t_8 = PyTuple_New(3+__pyx_t_11); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 320, __pyx_L23_error)
__pyx_t_8 = PyTuple_New(2+__pyx_t_11); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 320, __pyx_L23_error)
__Pyx_GOTREF(__pyx_t_8);
if (__pyx_t_2) {
__Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_2); __pyx_t_2 = NULL;
@ -6805,12 +6805,9 @@ static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_bytec
__Pyx_INCREF(__pyx_v_main_debugger);
__Pyx_GIVEREF(__pyx_v_main_debugger);
PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_11, __pyx_v_main_debugger);
__Pyx_INCREF(Py_None);
__Pyx_GIVEREF(Py_None);
PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_11, Py_None);
__Pyx_INCREF(((PyObject *)__pyx_v_frame_obj));
__Pyx_GIVEREF(((PyObject *)__pyx_v_frame_obj));
PyTuple_SET_ITEM(__pyx_t_8, 2+__pyx_t_11, ((PyObject *)__pyx_v_frame_obj));
PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_11, ((PyObject *)__pyx_v_frame_obj));
__pyx_t_10 = __Pyx_PyObject_Call(__pyx_t_9, __pyx_t_8, NULL); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 320, __pyx_L23_error)
__Pyx_GOTREF(__pyx_t_10);
__Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
@ -6821,7 +6818,7 @@ static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_bytec
__pyx_v_can_skip = (!__pyx_t_4);
/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":322
* can_skip = not main_debugger.plugin.can_not_skip(main_debugger, None, <object> frame_obj)
* can_skip = not main_debugger.plugin.can_not_skip(main_debugger, <object> frame_obj)
*
* if not can_skip: # <<<<<<<<<<<<<<
* # if DEBUG:
@ -6882,7 +6879,7 @@ static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_bytec
__pyx_L46:;
/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":322
* can_skip = not main_debugger.plugin.can_not_skip(main_debugger, None, <object> frame_obj)
* can_skip = not main_debugger.plugin.can_not_skip(main_debugger, <object> frame_obj)
*
* if not can_skip: # <<<<<<<<<<<<<<
* # if DEBUG:
@ -6894,7 +6891,7 @@ static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_bytec
* if not func_code_info.always_skip_code:
*
* if main_debugger.has_plugin_line_breaks: # <<<<<<<<<<<<<<
* can_skip = not main_debugger.plugin.can_not_skip(main_debugger, None, <object> frame_obj)
* can_skip = not main_debugger.plugin.can_not_skip(main_debugger, <object> frame_obj)
*
*/
}

View file

@ -317,7 +317,7 @@ cdef PyObject * get_bytecode_while_frame_eval(PyFrameObject * frame_obj, int exc
if not func_code_info.always_skip_code:
if main_debugger.has_plugin_line_breaks:
can_skip = not main_debugger.plugin.can_not_skip(main_debugger, None, <object> frame_obj)
can_skip = not main_debugger.plugin.can_not_skip(main_debugger, <object> frame_obj)
if not can_skip:
# if DEBUG:

View file

@ -65,7 +65,6 @@ def _modify_new_lines(code_to_modify, offset, code_to_insert):
byte_increments = code_to_modify.co_lnotab[0::2]
line_increments = code_to_modify.co_lnotab[1::2]
if offset == 0:
new_list[0] += bytecode_delta
else:

View file

@ -25,6 +25,7 @@ from _pydev_imps._pydev_saved_modules import thread
from _pydev_imps._pydev_saved_modules import threading
from _pydev_imps._pydev_saved_modules import time
from _pydevd_bundle import pydevd_extension_utils
from _pydevd_bundle.pydevd_filtering import FilesFiltering
from _pydevd_bundle import pydevd_io, pydevd_vm_type
from _pydevd_bundle import pydevd_utils
from _pydevd_bundle.pydevd_additional_thread_info import set_additional_thread_info
@ -102,9 +103,9 @@ def install_breakpointhook(pydevd_breakpointhook=None):
sys.breakpointhook = pydevd_breakpointhook
else:
if sys.version_info[0] >= 3:
import builtins as __builtin__ # Py3
import builtins as __builtin__ # Py3 noqa
else:
import __builtin__
import __builtin__ # noqa
# In older versions, breakpoint() isn't really available, so, install the hook directly
# in the builtins.
@ -359,6 +360,7 @@ class PyDB(object):
self.cmd_factory = NetCommandFactory()
self._cmd_queue = defaultdict(_queue.Queue) # Key is thread id or '*', value is Queue
self.suspended_frames_manager = SuspendedFramesManager()
self._files_filtering = FilesFiltering()
self.breakpoints = {}
@ -422,8 +424,9 @@ class PyDB(object):
self._filename_to_not_in_scope = {}
self.first_breakpoint_reached = False
self.is_filter_enabled = pydevd_utils.is_filter_enabled()
self.is_filter_libraries = pydevd_utils.is_filter_libraries()
self._exclude_filters_enabled = self._files_filtering.use_exclude_filters()
self._is_libraries_filter_enabled = self._files_filtering.use_libraries_filter()
self.is_files_filter_enabled = self._exclude_filters_enabled or self._is_libraries_filter_enabled
self.show_return_values = False
self.remove_return_values_flag = False
self.redirect_output = False
@ -452,7 +455,7 @@ class PyDB(object):
self.threading_active = threading._active
except:
try:
self.threading_get_ident = threading._get_ident # Python 2
self.threading_get_ident = threading._get_ident # Python 2 noqa
self.threading_active = threading._active
except:
self.threading_get_ident = None # Jython
@ -465,13 +468,17 @@ class PyDB(object):
self._dont_trace_get_file_type = DONT_TRACE.get
self.PYDEV_FILE = PYDEV_FILE
self._in_project_scope_cache = {}
self._exclude_by_filter_cache = {}
self._apply_filter_cache = {}
def add_fake_frame(self, thread_id, frame_id, frame):
self.suspended_frames_manager.add_fake_frame(thread_id, frame_id, frame)
def handle_breakpoint_condition(self, info, breakpoint, new_frame):
condition = breakpoint.condition
def handle_breakpoint_condition(self, info, pybreakpoint, new_frame):
condition = pybreakpoint.condition
try:
if breakpoint.handle_hit_condition(new_frame):
if pybreakpoint.handle_hit_condition(new_frame):
return True
if condition is None:
@ -481,7 +488,7 @@ class PyDB(object):
except Exception as e:
if IS_PY2:
# Must be bytes on py2.
if isinstance(condition, unicode):
if isinstance(condition, unicode): # noqa
condition = condition.encode('utf-8')
if not isinstance(e, self.skip_print_breakpoint_exception):
@ -510,10 +517,10 @@ class PyDB(object):
finally:
etype, value, tb = None, None, None
def handle_breakpoint_expression(self, breakpoint, info, new_frame):
def handle_breakpoint_expression(self, pybreakpoint, info, new_frame):
try:
try:
val = eval(breakpoint.expression, new_frame.f_globals, new_frame.f_locals)
val = eval(pybreakpoint.expression, new_frame.f_globals, new_frame.f_locals)
except:
val = sys.exc_info()[1]
finally:
@ -628,21 +635,101 @@ class PyDB(object):
self.plugin = PluginManager(self)
return self.plugin
def in_project_scope(self, filename, cache={}):
def in_project_scope(self, filename):
try:
return cache[filename]
return self._in_project_scope_cache[filename]
except KeyError:
cache = self._in_project_scope_cache
abs_real_path_and_basename = get_abs_path_real_path_and_base_from_file(filename)
# pydevd files are nevere considered to be in the project scope.
# pydevd files are never considered to be in the project scope.
if self.get_file_type(abs_real_path_and_basename) == self.PYDEV_FILE:
cache[filename] = False
else:
cache[filename] = pydevd_utils.in_project_roots(filename)
cache[filename] = self._files_filtering.in_project_roots(filename)
return cache[filename]
def is_ignored_by_filters(self, filename):
return pydevd_utils.is_ignored_by_filter(filename)
def _clear_filters_caches(self):
self._in_project_scope_cache.clear()
self._exclude_by_filter_cache.clear()
self._apply_filter_cache.clear()
self._exclude_filters_enabled = self._files_filtering.use_exclude_filters()
self._is_libraries_filter_enabled = self._files_filtering.use_libraries_filter()
self.is_files_filter_enabled = self._exclude_filters_enabled or self._is_libraries_filter_enabled
def _exclude_by_filter(self, frame, filename):
'''
:param str filename:
The filename to filter.
:return: True if it should be excluded, False if it should be included and None
if no rule matched the given file.
'''
try:
return self._exclude_by_filter_cache[filename]
except KeyError:
cache = self._exclude_by_filter_cache
abs_real_path_and_basename = get_abs_path_real_path_and_base_from_file(filename)
# pydevd files are always filtered out
if self.get_file_type(abs_real_path_and_basename) == self.PYDEV_FILE:
cache[filename] = True
else:
module_name = None
if self._files_filtering.require_module:
module_name = frame.f_globals.get('__name__')
cache[filename] = self._files_filtering.exclude_by_filter(filename, module_name)
return cache[filename]
def apply_files_filter(self, frame, filename, force_check_project_scope):
'''
Should only be called if `self.is_files_filter_enabled == True`.
Note that it covers both the filter by specific paths includes/excludes as well
as the check which filters out libraries if not in the project scope.
:param force_check_project_scope:
Check that the file is in the project scope even if the global setting
is off.
:return bool:
True if it should be excluded when stepping and False if it should be
included.
'''
cache_key = (frame.f_code.co_firstlineno, frame.f_code.co_name, filename, force_check_project_scope)
try:
return self._apply_filter_cache[cache_key]
except KeyError:
if self.plugin is not None and self.has_plugin_line_breaks:
# If it's explicitly needed by some plugin, we can't skip it.
if self.plugin.can_not_skip(self, frame):
# print('include (include by plugins): %s' % filename)
self._apply_filter_cache[cache_key] = False
return False
if self._exclude_filters_enabled:
exclude_by_filter = self._exclude_by_filter(frame, filename)
if exclude_by_filter is not None:
if exclude_by_filter:
# ignore files matching stepping filters
# print('exclude (filtered out): %s' % filename)
self._apply_filter_cache[cache_key] = True
return True
else:
# print('include (explicitly included): %s' % filename)
self._apply_filter_cache[cache_key] = False
return False
if (self._is_libraries_filter_enabled or force_check_project_scope) and not self.in_project_scope(filename):
# print('exclude (not on project): %s' % filename)
# ignore library files while stepping
self._apply_filter_cache[cache_key] = True
return True
# print('include (on project): %s' % filename)
self._apply_filter_cache[cache_key] = False
return False
def is_exception_trace_in_project_scope(self, trace):
if trace is None or not self.in_project_scope(trace.tb_frame.f_code.co_filename):
@ -655,6 +742,21 @@ class PyDB(object):
trace = trace.tb_next
return True
def set_project_roots(self, project_roots):
self._files_filtering.set_project_roots(project_roots)
self._clear_skip_caches()
self._clear_filters_caches()
def set_exclude_filters(self, exclude_filters):
self._files_filtering.set_exclude_filters(exclude_filters)
self._clear_skip_caches()
self._clear_filters_caches()
def set_use_libraries_filter(self, use_libraries_filter):
self._files_filtering.set_use_libraries_filter(use_libraries_filter)
self._clear_skip_caches()
self._clear_filters_caches()
def has_threads_alive(self):
for t in pydevd_utils.get_non_pydevd_threads():
if isinstance(t, PyDBDaemonThread):
@ -905,13 +1007,13 @@ class PyDB(object):
def consolidate_breakpoints(self, file, id_to_breakpoint, breakpoints):
break_dict = {}
for breakpoint_id, pybreakpoint in dict_iter_items(id_to_breakpoint):
for _breakpoint_id, pybreakpoint in dict_iter_items(id_to_breakpoint):
break_dict[pybreakpoint.line] = pybreakpoint
breakpoints[file] = break_dict
self.clear_skip_caches()
self._clear_skip_caches()
def clear_skip_caches(self):
def _clear_skip_caches(self):
global_cache_skips.clear()
global_cache_frame_skips.clear()
@ -1196,7 +1298,7 @@ class PyDB(object):
stop = False
response_msg = ""
try:
stop, old_line, response_msg = self.set_next_statement(frame, event, info.pydev_func_name, info.pydev_next_line)
stop, _old_line, response_msg = self.set_next_statement(frame, event, info.pydev_func_name, info.pydev_next_line)
except ValueError as e:
response_msg = "%s" % e
finally:
@ -1227,13 +1329,15 @@ class PyDB(object):
elif info.pydev_step_cmd in (CMD_STEP_RETURN, CMD_STEP_RETURN_MY_CODE):
back_frame = frame.f_back
if info.pydev_step_cmd == CMD_STEP_RETURN_MY_CODE:
force_check_project_scope = info.pydev_step_cmd == CMD_STEP_RETURN_MY_CODE
if force_check_project_scope or self.is_files_filter_enabled:
while back_frame is not None:
if self.in_project_scope(back_frame.f_code.co_filename):
break
else:
if self.apply_files_filter(back_frame, back_frame.f_code.co_filename, force_check_project_scope):
frame = back_frame
back_frame = back_frame.f_back
else:
break
if back_frame is not None:
# steps back to the same frame (in a return call it will stop in the 'back frame' for the user)
@ -1571,7 +1675,7 @@ class _CustomWriter(object):
if s:
if IS_PY2:
# Need s in bytes
if isinstance(s, unicode):
if isinstance(s, unicode): # noqa
# Note: python 2.6 does not accept the "errors" keyword.
s = s.encode('utf-8', 'replace')
else:

View file

@ -1,12 +1,12 @@
from _pydevd_bundle.pydevd_comm import CMD_SET_BREAK, CMD_ADD_EXCEPTION_BREAK
import inspect
from _pydevd_bundle.pydevd_constants import STATE_SUSPEND, dict_iter_items, DJANGO_SUSPEND, IS_PY2, get_current_thread_id
from pydevd_file_utils import get_abs_path_real_path_and_base_from_file, normcase
from _pydevd_bundle.pydevd_breakpoints import LineBreakpoint
from _pydevd_bundle import pydevd_vars
import traceback
from _pydev_bundle import pydev_log
from _pydevd_bundle.pydevd_breakpoints import LineBreakpoint
from _pydevd_bundle.pydevd_comm import CMD_SET_BREAK, CMD_ADD_EXCEPTION_BREAK
from _pydevd_bundle.pydevd_constants import STATE_SUSPEND, dict_iter_items, DJANGO_SUSPEND, IS_PY2
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, FCode, just_raised, ignore_exception_trace
from pydevd_file_utils import get_abs_path_real_path_and_base_from_file, normcase
IS_DJANGO18 = False
IS_DJANGO19 = False
@ -22,6 +22,7 @@ except:
class DjangoLineBreakpoint(LineBreakpoint):
def __init__(self, file, line, condition, func_name, expression, hit_condition=None, is_logpoint=False):
self.file = file
LineBreakpoint.__init__(self, line, condition, func_name, expression, hit_condition=hit_condition, is_logpoint=is_logpoint)
@ -41,6 +42,7 @@ def add_line_breakpoint(plugin, pydb, type, file, line, condition, expression, f
return breakpoint, pydb.django_breakpoints
return None
def add_exception_breakpoint(plugin, pydb, type, exception):
if type == 'django':
if not hasattr(pydb, 'django_exception_break'):
@ -49,10 +51,12 @@ def add_exception_breakpoint(plugin, pydb, type, exception):
return True
return False
def _init_plugin_breaks(pydb):
pydb.django_exception_break = {}
pydb.django_breakpoints = {}
def remove_exception_breakpoint(plugin, pydb, type, exception):
if type == 'django':
try:
@ -62,11 +66,13 @@ def remove_exception_breakpoint(plugin, pydb, type, exception):
pass
return False
def get_breakpoints(plugin, pydb, type):
if type == 'django-line':
return pydb.django_breakpoints
return None
def _inherits(cls, *names):
if cls.__name__ in names:
return True
@ -165,6 +171,7 @@ def _find_django_render_frame(frame):
# Django Frame
#=======================================================================================================================
def _read_file(filename):
# type: (str) -> str
if IS_PY2:
@ -281,6 +288,7 @@ def _get_template_line(frame):
class DjangoTemplateFrame:
def __init__(self, frame):
file_name = _get_template_file_name(frame)
self.back_context = frame.f_locals['context']
@ -323,12 +331,12 @@ def _is_django_exception_break_context(frame):
name = None
return name in ['_resolve_lookup', 'find_template']
#=======================================================================================================================
# Django Step Commands
#=======================================================================================================================
def can_not_skip(plugin, main_debugger, pydb_frame, frame):
def can_not_skip(plugin, main_debugger, frame):
return main_debugger.django_breakpoints and _is_django_render_call(frame)
@ -419,6 +427,7 @@ def suspend(plugin, main_debugger, thread, frame, bp_type):
return suspend_django(main_debugger, thread, frame)
return None
def exception_break(plugin, main_debugger, pydb_frame, frame, args, arg):
main_debugger = args[0]
thread = args[3]

View file

@ -6,6 +6,7 @@ from _pydevd_bundle import pydevd_vars
from pydevd_file_utils import get_abs_path_real_path_and_base_from_file
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, FCode
class Jinja2LineBreakpoint(LineBreakpoint):
def __init__(self, file, line, condition, func_name, expression, hit_condition=None, is_logpoint=False):
@ -29,6 +30,7 @@ def add_line_breakpoint(plugin, pydb, type, file, line, condition, expression, f
return result
return result
def add_exception_breakpoint(plugin, pydb, type, exception):
if type == 'jinja2':
if not hasattr(pydb, 'jinja2_exception_break'):
@ -37,10 +39,12 @@ def add_exception_breakpoint(plugin, pydb, type, exception):
return True
return False
def _init_plugin_breaks(pydb):
pydb.jinja2_exception_break = {}
pydb.jinja2_breakpoints = {}
def remove_exception_breakpoint(plugin, pydb, type, exception):
if type == 'jinja2':
try:
@ -50,6 +54,7 @@ def remove_exception_breakpoint(plugin, pydb, type, exception):
pass
return False
def get_breakpoints(plugin, pydb, type):
if type == 'jinja2-line':
return pydb.jinja2_breakpoints
@ -84,27 +89,31 @@ def _suspend_jinja2(pydb, thread, frame, cmd=CMD_SET_BREAK, message=None):
return frame
def _is_jinja2_suspended(thread):
return thread.additional_info.suspend_type == JINJA2_SUSPEND
def _is_jinja2_context_call(frame):
return "_Context__obj" in frame.f_locals
def _is_jinja2_internal_function(frame):
return 'self' in frame.f_locals and frame.f_locals['self'].__class__.__name__ in \
('LoopContext', 'TemplateReference', 'Macro', 'BlockReference')
def _find_jinja2_render_frame(frame):
while frame is not None and not _is_jinja2_render_call(frame):
frame = frame.f_back
return frame
#=======================================================================================================================
# Jinja2 Frame
#=======================================================================================================================
class Jinja2TemplateFrame:
def __init__(self, frame):
@ -170,6 +179,7 @@ def _is_missing(item):
return True
return False
def _find_render_function_frame(frame):
# in order to hide internal rendering functions
old_frame = frame
@ -183,6 +193,7 @@ def _find_render_function_frame(frame):
except:
return old_frame
def _get_jinja2_template_line(frame):
debug_info = None
if '__jinja_template__' in frame.f_globals:
@ -202,6 +213,7 @@ def _get_jinja2_template_line(frame):
return None
def _get_jinja2_template_filename(frame):
if '__jinja_template__' in frame.f_globals:
fname = frame.f_globals['__jinja_template__'].filename
@ -209,7 +221,6 @@ def _get_jinja2_template_filename(frame):
return abs_path_real_path_and_base[1]
return None
#=======================================================================================================================
# Jinja2 Step Commands
#=======================================================================================================================
@ -220,13 +231,15 @@ def has_exception_breaks(plugin):
return True
return False
def has_line_breaks(plugin):
for file, breakpoints in dict_iter_items(plugin.main_debugger.jinja2_breakpoints):
if len(breakpoints) > 0:
return True
return False
def can_not_skip(plugin, pydb, pydb_frame, frame):
def can_not_skip(plugin, pydb, frame):
if pydb.jinja2_breakpoints and _is_jinja2_render_call(frame):
filename = _get_jinja2_template_filename(frame)
jinja2_breakpoints_for_file = pydb.jinja2_breakpoints.get(filename)

View file

@ -186,6 +186,7 @@ class ReaderThread(threading.Thread):
from Queue import Queue
self.setDaemon(True)
self._buffer = b''
self.sock = sock
self._queue = Queue()
self._kill = False
@ -220,13 +221,45 @@ class ReaderThread(threading.Thread):
sys.stdout.write('Message returned in get_next_message(): %s -- ctx: %s, asked at:\n%s\n' % (unquote_plus(unquote_plus(msg)), context_message, frame_info))
return msg
def _read(self, size):
while True:
buffer_len = len(self._buffer)
if buffer_len == size:
ret = self._buffer
self._buffer = b''
return ret
if buffer_len > size:
ret = self._buffer[:size]
self._buffer = self._buffer[size:]
return ret
r = self.sock.recv(max(size - buffer_len, 1024))
if not r:
return b''
self._buffer += r
def _read_line(self):
while True:
i = self._buffer.find(b'\n')
if i != -1:
i += 1 # Add the newline to the return
ret = self._buffer[:i]
self._buffer = self._buffer[i:]
return ret
else:
r = self.sock.recv(1024)
if not r:
return b''
self._buffer += r
def run(self):
try:
content_len = -1
stream = self.sock.makefile('rb')
while not self._kill:
line = stream.readline()
line = self._read_line()
if not line:
break
@ -239,19 +272,40 @@ class ReaderThread(threading.Thread):
if line.startswith(b'Content-Length:'):
content_len = int(line.strip().split(b':', 1)[1])
continue
elif content_len != -1:
if content_len != -1:
# If we previously received a content length, read until a '\r\n'.
if line == b'\r\n':
msg = stream.read(content_len)
json_contents = self._read(content_len)
content_len = -1
if len(json_contents) == 0:
self.handle_except()
return # Finished communication.
msg = json_contents
if IS_PY3K:
msg = msg.decode('utf-8')
print('Test Reader Thread Received %s' % (msg,))
self._queue.put(msg)
continue
else:
# No content len, regular line-based protocol message (remove trailing new-line).
if line.endswith(b'\n\n'):
line = line[:-2]
elif line.endswith(b'\n'):
line = line[:-1]
elif line.endswith(b'\r'):
line = line[:-1]
msg = line
if IS_PY3K:
msg = msg.decode('utf-8')
print('Test Reader Thread Received %s' % (msg,))
self._queue.put(msg)
except:
@ -738,6 +792,7 @@ class AbstractWriterThread(threading.Thread):
for r in reason:
if ('stop_reason="%s"' % (r,)) in last:
return True
return False
msg = self.wait_for_message(accept_message, timeout=timeout)
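The reader changes above implement the 'Content-Length' framing used by the JSON protocol: a header line gives the body size in bytes, then a blank '\r\n' line, then the payload itself. A minimal standalone round-trip sketch (helper names assumed, not from the test suite):

import io
import json

def frame_message(payload):
    # Hypothetical helper: serialize a dict with the Content-Length header parsed above.
    body = json.dumps(payload).encode('utf-8')
    return b'Content-Length: ' + str(len(body)).encode('ascii') + b'\r\n\r\n' + body

def read_framed_message(stream):
    # Hypothetical helper mirroring the parsing done in ReaderThread.run above.
    content_len = -1
    while True:
        line = stream.readline()
        if line.startswith(b'Content-Length:'):
            content_len = int(line.strip().split(b':', 1)[1])
        elif line == b'\r\n' and content_len != -1:
            return json.loads(stream.read(content_len).decode('utf-8'))

stream = io.BytesIO(frame_message({'seq': 1, 'type': 'request'}))
assert read_framed_message(stream)['seq'] == 1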

View file

@ -451,7 +451,6 @@ class TestInsertCode(unittest.TestCase):
self.check_insert_to_line_by_symbols(foo, call_tracing, foo.__code__.co_firstlineno + 2,
check_line_2.__code__)
finally:
sys.stdout = self.original_stdout

View file

@ -64,7 +64,7 @@ def test_to_server_and_to_client(tmpdir):
('c:\\foo\\', 'c:\\bar'),
('c:/foo/', 'c:\\bar'),
('c:\\foo\\', 'c:/bar'),
]):
PATHS_FROM_ECLIPSE_TO_PYTHON = [
(in_eclipse, in_python)
@ -86,7 +86,7 @@ def test_to_server_and_to_client(tmpdir):
('/foo/', 'c:\\bar'),
('/foo/', 'c:\\bar\\'),
]):
PATHS_FROM_ECLIPSE_TO_PYTHON = [
(in_eclipse, in_python)
]
@ -142,25 +142,30 @@ def test_to_server_and_to_client(tmpdir):
# Client on windows and server on unix
pydevd_file_utils.set_ide_os('WINDOWS')
for in_eclipse, in_python in ([
('c:\\foo', '/bar'),
('c:/foo', '/bar'),
('c:/foo/', '/bar'),
('c:\\foo', '/báéíóúr'),
('c:/foo', '/báéíóúr'),
('c:/foo/', '/báéíóúr'),
('c:/foo/', '/báéíóúr/'),
('c:\\foo\\', '/báéíóúr/'),
]):
PATHS_FROM_ECLIPSE_TO_PYTHON = [
(in_eclipse, in_python)
]
pydevd_file_utils.setup_client_server_paths(PATHS_FROM_ECLIPSE_TO_PYTHON)
assert pydevd_file_utils.norm_file_to_server('c:\\foo\\my') == '/bar/my'
assert pydevd_file_utils.norm_file_to_server('c:/foo/my') == '/bar/my'
assert pydevd_file_utils.norm_file_to_server('c:\\foo\\my\\') == '/bar/my'
assert pydevd_file_utils.norm_file_to_server('c:/foo/my/') == '/bar/my'
assert pydevd_file_utils.norm_file_to_client('/bar/my') == 'c:\\foo\\my'
assert pydevd_file_utils.norm_file_to_client('/bar/my/') == 'c:\\foo\\my'
assert pydevd_file_utils.norm_file_to_server('c:\\foo\\my') == '/báéíóúr/my'
assert pydevd_file_utils.norm_file_to_server('c:\\foo\\my\\file.py') == '/báéíóúr/my/file.py'
assert pydevd_file_utils.norm_file_to_server('c:\\foo\\my\\other\\file.py') == '/báéíóúr/my/other/file.py'
assert pydevd_file_utils.norm_file_to_server('c:/foo/my') == '/báéíóúr/my'
assert pydevd_file_utils.norm_file_to_server('c:\\foo\\my\\') == '/báéíóúr/my'
assert pydevd_file_utils.norm_file_to_server('c:/foo/my/') == '/báéíóúr/my'
assert pydevd_file_utils.norm_file_to_client('/báéíóúr/my') == 'c:\\foo\\my'
assert pydevd_file_utils.norm_file_to_client('/báéíóúr/my/') == 'c:\\foo\\my'
# Files for which there's no translation have only their separators updated.
assert pydevd_file_utils.norm_file_to_client('/usr/bin/x.py') == '\\usr\\bin\\x.py'
assert pydevd_file_utils.norm_file_to_client('/usr/bin') == '\\usr\\bin'
assert pydevd_file_utils.norm_file_to_client('/usr/bin/') == '\\usr\\bin'
assert pydevd_file_utils.norm_file_to_server('\\usr\\bin') == '/usr/bin'
@ -169,13 +174,13 @@ def test_to_server_and_to_client(tmpdir):
# Client and server on unix
pydevd_file_utils.set_ide_os('UNIX')
in_eclipse = '/foo'
in_python = '/bar'
in_python = '/báéíóúr'
PATHS_FROM_ECLIPSE_TO_PYTHON = [
(in_eclipse, in_python)
]
pydevd_file_utils.setup_client_server_paths(PATHS_FROM_ECLIPSE_TO_PYTHON)
assert pydevd_file_utils.norm_file_to_server('/foo/my') == '/bar/my'
assert pydevd_file_utils.norm_file_to_client('/bar/my') == '/foo/my'
assert pydevd_file_utils.norm_file_to_server('/foo/my') == '/báéíóúr/my'
assert pydevd_file_utils.norm_file_to_client('/báéíóúr/my') == '/foo/my'
finally:
pydevd_file_utils.setup_client_server_paths([])

View file

@ -20,6 +20,7 @@ from tests_python.debugger_unittest import (CMD_SET_PROPERTY_TRACE, REASON_CAUGH
REASON_STEP_OVER_MY_CODE, REASON_STEP_INTO, CMD_THREAD_KILL)
from _pydevd_bundle.pydevd_constants import IS_WINDOWS
from _pydevd_bundle.pydevd_comm_constants import CMD_RELOAD_CODE
import json
try:
from urllib import unquote
except ImportError:
@ -51,14 +52,12 @@ try:
except:
pass
if IS_PY2:
builtin_qualifier = "__builtin__"
else:
builtin_qualifier = "builtins"
@pytest.mark.skipif(IS_IRONPYTHON, reason='Test needs gc.get_referrers to really check anything.')
def test_case_referrers(case_setup):
with case_setup.test_file('_debugger_case1.py') as writer:
@ -901,7 +900,15 @@ def test_case_flask(case_setup_flask):
@pytest.mark.skipif(not TEST_DJANGO, reason='No django available')
def test_case_django_a(case_setup_django):
with case_setup_django.test_file(EXPECTED_RETURNCODE='any') as writer:
def get_environ(writer):
env = os.environ.copy()
env.update({
'PYDEVD_FILTER_LIBRARIES': '1', # Global setting for in project or not
})
return env
with case_setup_django.test_file(EXPECTED_RETURNCODE='any', get_environ=get_environ) as writer:
writer.write_add_breakpoint_django(5, None, 'index.html')
writer.write_make_initial_run()
@ -2570,7 +2577,6 @@ def test_return_value(case_setup):
writer.write_step_over(hit.thread_id)
hit = writer.wait_for_breakpoint_hit(REASON_STEP_OVER, name='<module>', line=break_line + 1)
writer.write_get_frame(hit.thread_id, hit.frame_id)
writer.wait_for_vars([
[
'<var name="method1" type="int" qualifier="%s" value="int: 1" isRetVal="True"' % (builtin_qualifier,),
@ -2829,6 +2835,110 @@ def test_step_over_my_code(case_setup):
writer.finished_ok = True
@pytest.fixture(
params=[
'step_over',
'step_return',
'step_in',
]
)
def step_method(request):
return request.param
@pytest.mark.parametrize("environ", [
{'PYDEVD_FILTER_LIBRARIES': '1'}, # Global setting for step over
{'PYDEVD_FILTERS': json.dumps({'**/other.py': True})}, # specify as json
{'PYDEVD_FILTERS': '**/other.py'}, # specify ';' separated list
])
def test_step_over_my_code_global_settings(case_setup, environ, step_method):
def get_environ(writer):
env = os.environ.copy()
env.update(environ)
return env
def do_step():
if step_method == 'step_over':
writer.write_step_over(hit.thread_id)
return REASON_STEP_INTO # Note: goes from step over to step into
elif step_method == 'step_return':
writer.write_step_return(hit.thread_id)
return REASON_STEP_RETURN
else:
assert step_method == 'step_in'
writer.write_step_in(hit.thread_id)
return REASON_STEP_INTO
with case_setup.test_file('my_code/my_code.py', get_environ=get_environ) as writer:
writer.write_set_project_roots([debugger_unittest._get_debugger_test_file('my_code')])
writer.write_add_breakpoint(writer.get_line_index_with_content('break here'))
writer.write_make_initial_run()
hit = writer.wait_for_breakpoint_hit()
writer.write_step_in(hit.thread_id)
hit = writer.wait_for_breakpoint_hit(reason=REASON_STEP_INTO)
assert hit.name == 'callback1'
writer.write_step_in(hit.thread_id)
hit = writer.wait_for_breakpoint_hit(reason=REASON_STEP_INTO)
assert hit.name == 'callback2'
stop_reason = do_step()
hit = writer.wait_for_breakpoint_hit(reason=stop_reason)
assert hit.name == 'callback1'
stop_reason = do_step()
hit = writer.wait_for_breakpoint_hit(reason=stop_reason)
assert hit.name == '<module>'
if IS_JYTHON:
# Jython may get to exit functions, so just resume the thread.
writer.write_run_thread(hit.thread_id)
else:
stop_reason = do_step()
if step_method != 'step_return':
stop_reason = do_step()
if step_method == 'step_over':
stop_reason = REASON_STEP_OVER
hit = writer.wait_for_breakpoint_hit(reason=stop_reason)
assert hit.name == '<module>'
writer.write_step_over(hit.thread_id)
writer.finished_ok = True
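For reference, the environment-variable formats exercised by the parametrization above can be prepared like this (a hedged sketch of the env that would be handed to the debugged process; the patterns are illustrative):

import json
import os

env = os.environ.copy()
env['PYDEVD_FILTER_LIBRARIES'] = '1'  # Global switch: only trace files under the project roots.
# JSON form: glob pattern -> True to exclude, False to force-include.
env['PYDEVD_FILTERS'] = json.dumps({'**/other.py': True})
# ';'-separated form (assumed here that every listed pattern is excluded):
# env['PYDEVD_FILTERS'] = '**/other.py'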
def test_step_over_my_code_global_setting_and_explicit_include(case_setup):
def get_environ(writer):
env = os.environ.copy()
env.update({
'PYDEVD_FILTER_LIBRARIES': '1', # Global setting for in project or not
# specify as json (force include).
'PYDEVD_FILTERS': json.dumps({'**/other.py': False})
})
return env
with case_setup.test_file('my_code/my_code.py', get_environ=get_environ) as writer:
writer.write_set_project_roots([debugger_unittest._get_debugger_test_file('my_code')])
writer.write_add_breakpoint(writer.get_line_index_with_content('break here'))
writer.write_make_initial_run()
hit = writer.wait_for_breakpoint_hit()
writer.write_step_in(hit.thread_id)
hit = writer.wait_for_breakpoint_hit(reason=REASON_STEP_INTO)
# Although we filtered out non-project files, other.py is explicitly included.
assert hit.name == 'call_me_back1'
writer.write_run_thread(hit.thread_id)
writer.finished_ok = True
def test_matplotlib_activation(case_setup):
try:
import matplotlib

View file

@ -1,8 +1,11 @@
import pytest
from _pydevd_bundle._debug_adapter import pydevd_schema, pydevd_base_schema
from _pydevd_bundle._debug_adapter.pydevd_base_schema import from_json
from _pydevd_bundle._debug_adapter.pydevd_schema import ThreadEvent
from tests_python.debugger_unittest import IS_JYTHON
import pytest
from _pydevd_bundle._debug_adapter import pydevd_schema, pydevd_base_schema
from tests_python import debugger_unittest
from tests_python.debugger_unittest import IS_JYTHON, REASON_STEP_INTO, REASON_STEP_OVER
import json
pytest_plugins = [
str('tests_python.debugger_fixtures'),
@ -31,16 +34,24 @@ class JsonFacade(object):
response_class = pydevd_base_schema.get_response_class(request)
def accept_message(response):
if response.request_seq == request.seq:
return True
if isinstance(request, dict):
if response.request_seq == request['seq']:
return True
else:
if response.request_seq == request.seq:
return True
return False
return self.wait_for_json_message(response_class, accept_message)
def write_request(self, request):
seq = self.writer.next_seq()
request.seq = seq
self.writer.write_with_content_len(request.to_json())
if isinstance(request, dict):
request['seq'] = seq
self.writer.write_with_content_len(json.dumps(request))
else:
request.seq = seq
self.writer.write_with_content_len(request.to_json())
return request
def write_make_initial_run(self):
@ -91,9 +102,9 @@ class JsonFacade(object):
lines_in_response = [b['line'] for b in body.breakpoints]
assert set(lines_in_response) == set(lines)
def write_launch(self):
arguments = pydevd_schema.LaunchRequestArguments(noDebug=False)
request = pydevd_schema.LaunchRequest(arguments)
def write_launch(self, **arguments):
arguments['noDebug'] = False
request = {'type': 'request', 'command': 'launch', 'arguments': arguments, 'seq':-1}
self.wait_for_response(self.write_request(request))
def write_disconnect(self):
@ -150,6 +161,105 @@ def test_case_json_protocol(case_setup):
writer.finished_ok = True
@pytest.mark.parametrize("custom_setup", [
'set_exclude_launch_module_full',
'set_exclude_launch_module_prefix',
'set_exclude_launch_path_match_filename',
'set_exclude_launch_path_match_folder',
'set_just_my_code',
'set_just_my_code_and_include',
])
def test_case_skipping_filters(case_setup, custom_setup):
with case_setup.test_file('my_code/my_code.py') as writer:
json_facade = JsonFacade(writer)
writer.write_set_protocol('http_json')
if custom_setup == 'set_exclude_launch_path_match_filename':
json_facade.write_launch(
debugStdLib=True,
rules=[
{'path': '**/other.py', 'include':False},
]
)
elif custom_setup == 'set_exclude_launch_path_match_folder':
json_facade.write_launch(
debugStdLib=True,
rules=[
{'path': debugger_unittest._get_debugger_test_file('not_my_code'), 'include':False},
]
)
elif custom_setup == 'set_exclude_launch_module_full':
json_facade.write_launch(
debugStdLib=True,
rules=[
{'module': 'not_my_code.other', 'include':False},
]
)
elif custom_setup == 'set_exclude_launch_module_prefix':
json_facade.write_launch(
debugStdLib=True,
rules=[
{'module': 'not_my_code', 'include':False},
]
)
elif custom_setup == 'set_just_my_code':
writer.write_set_project_roots([debugger_unittest._get_debugger_test_file('my_code')])
json_facade.write_launch(debugStdLib=False)
elif custom_setup == 'set_just_my_code_and_include':
# I.e.: nothing in my_code (add it with rule).
writer.write_set_project_roots([debugger_unittest._get_debugger_test_file('launch')])
json_facade.write_launch(
debugStdLib=False,
rules=[
{'module': '__main__', 'include':True},
]
)
else:
raise AssertionError('Unhandled: %s' % (custom_setup,))
json_facade.write_add_breakpoints(writer.get_line_index_with_content('break here'))
json_facade.write_make_initial_run()
json_facade.wait_for_json_message(ThreadEvent, lambda event: event.body.reason == 'started')
hit = writer.wait_for_breakpoint_hit()
writer.write_step_in(hit.thread_id)
hit = writer.wait_for_breakpoint_hit(reason=REASON_STEP_INTO)
assert hit.name == 'callback1'
writer.write_step_in(hit.thread_id)
hit = writer.wait_for_breakpoint_hit(reason=REASON_STEP_INTO)
assert hit.name == 'callback2'
writer.write_step_over(hit.thread_id)
hit = writer.wait_for_breakpoint_hit(reason=REASON_STEP_INTO) # Note: goes from step over to step into
assert hit.name == 'callback1'
writer.write_step_over(hit.thread_id)
hit = writer.wait_for_breakpoint_hit(reason=REASON_STEP_INTO) # Note: goes from step over to step into
assert hit.name == '<module>'
writer.write_step_over(hit.thread_id)
hit = writer.wait_for_breakpoint_hit(reason=REASON_STEP_OVER)
assert hit.name == '<module>'
writer.write_step_over(hit.thread_id)
if IS_JYTHON:
writer.write_run_thread(hit.thread_id)
else:
writer.write_step_over(hit.thread_id)
writer.finished_ok = True
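Spelled out as raw JSON, the launch requests written above look roughly like the following (a sketch based on the fields used in these tests; other launch arguments are omitted, and module rules take the same shape with 'module' instead of 'path'):

import json

launch_request = {
    'type': 'request',
    'command': 'launch',
    'seq': 1,
    'arguments': {
        'noDebug': False,
        'debugStdLib': True,  # don't filter out library code globally...
        'rules': [
            {'path': '**/other.py', 'include': False},  # ...but exclude this path glob
        ],
    },
}
print(json.dumps(launch_request))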
def test_case_completions_json(case_setup):
with case_setup.test_file('_debugger_case_print.py') as writer:
json_facade = JsonFacade(writer)
@ -188,3 +298,8 @@ def test_case_completions_json(case_setup):
writer.write_run_thread(thread_id)
writer.finished_ok = True
if __name__ == '__main__':
pytest.main(['-k', 'test_case_skipping_filters', '-s'])

View file

@ -22,11 +22,11 @@ class _DummySocket(object):
self._socket_server.bind((host, 0))
self._socket_server.listen(1)
def makefile(self, *args, **kwargs):
assert not self._sock_for_reader_thread
sock, _addr = self._socket_server.accept()
self._sock_for_reader_thread = sock
return sock.makefile(*args, **kwargs)
def recv(self, *args, **kwargs):
if self._sock_for_reader_thread is None:
sock, _addr = self._socket_server.accept()
self._sock_for_reader_thread = sock
return self._sock_for_reader_thread.recv(*args, **kwargs)
def put(self, msg):
if IS_PY3K and not isinstance(msg, bytes):
@ -86,9 +86,9 @@ def test_fixture_reader_thread2(_dummy_socket):
http = ('Content-Length: %s\r\n\r\n%s' % (len(msg), msg))
sock.put('msg1\nmsg2\nmsg3\n' + http + http)
assert reader_thread.get_next_message('check 1') == 'msg1\n'
assert reader_thread.get_next_message('check 2') == 'msg2\n'
assert reader_thread.get_next_message('check 3') == 'msg3\n'
assert reader_thread.get_next_message('check 1') == 'msg1'
assert reader_thread.get_next_message('check 2') == 'msg2'
assert reader_thread.get_next_message('check 3') == 'msg3'
assert reader_thread.get_next_message('check 4') == json_part
assert reader_thread.get_next_message('check 5') == json_part

View file

@ -1,65 +0,0 @@
def test_in_project_roots(tmpdir):
from _pydevd_bundle import pydevd_utils
import os.path
import sys
assert pydevd_utils._get_library_roots() == [
os.path.normcase(x) for x in pydevd_utils._get_default_library_roots()]
site_packages = tmpdir.mkdir('site-packages')
project_dir = tmpdir.mkdir('project')
project_dir_inside_site_packages = str(site_packages.mkdir('project'))
site_packages_inside_project_dir = str(project_dir.mkdir('site-packages'))
# Convert from pytest paths to str.
site_packages = str(site_packages)
project_dir = str(project_dir)
tmpdir = str(tmpdir)
# Test permutations of project dir inside site packages and vice-versa.
pydevd_utils.set_project_roots([project_dir, project_dir_inside_site_packages])
pydevd_utils.set_library_roots([site_packages, site_packages_inside_project_dir])
check = [
(tmpdir, False),
(site_packages, False),
(site_packages_inside_project_dir, False),
(project_dir, True),
(project_dir_inside_site_packages, True),
]
for (check_path, find) in check[:]:
check.append((os.path.join(check_path, 'a.py'), find))
for check_path, find in check:
assert pydevd_utils.in_project_roots(check_path) == find
pydevd_utils.set_project_roots([])
pydevd_utils.set_library_roots([site_packages, site_packages_inside_project_dir])
# If the IDE did not set the project roots, consider anything not in the site
# packages as being in a project root (i.e.: we can calculate default values for
# site-packages but not for project roots).
check = [
(tmpdir, True),
(site_packages, False),
(site_packages_inside_project_dir, False),
(project_dir, True),
(project_dir_inside_site_packages, False),
(os.path.join(tmpdir, '<foo>'), False),
]
for check_path, find in check:
assert pydevd_utils.in_project_roots(check_path) == find
sys.path.append(str(site_packages))
try:
default_library_roots = pydevd_utils._get_default_library_roots()
assert len(set(default_library_roots)) == len(default_library_roots), \
'Duplicated library roots found in: %s' % (default_library_roots,)
assert str(site_packages) in default_library_roots
for path in sys.path:
if os.path.exists(path) and path.endswith('site-packages'):
assert path in default_library_roots
finally:
sys.path.remove(str(site_packages))

View file

@ -0,0 +1,188 @@
def test_in_project_roots(tmpdir):
from _pydevd_bundle.pydevd_filtering import FilesFiltering
files_filtering = FilesFiltering()
import os.path
import sys
assert files_filtering._get_library_roots() == [
os.path.normcase(x) for x in files_filtering._get_default_library_roots()]
site_packages = tmpdir.mkdir('site-packages')
project_dir = tmpdir.mkdir('project')
project_dir_inside_site_packages = str(site_packages.mkdir('project'))
site_packages_inside_project_dir = str(project_dir.mkdir('site-packages'))
# Convert from pytest paths to str.
site_packages = str(site_packages)
project_dir = str(project_dir)
tmpdir = str(tmpdir)
# Test permutations of project dir inside site packages and vice-versa.
files_filtering.set_project_roots([project_dir, project_dir_inside_site_packages])
files_filtering.set_library_roots([site_packages, site_packages_inside_project_dir])
check = [
(tmpdir, False),
(site_packages, False),
(site_packages_inside_project_dir, False),
(project_dir, True),
(project_dir_inside_site_packages, True),
]
for (check_path, find) in check[:]:
check.append((os.path.join(check_path, 'a.py'), find))
for check_path, find in check:
assert files_filtering.in_project_roots(check_path) == find
files_filtering.set_project_roots([])
files_filtering.set_library_roots([site_packages, site_packages_inside_project_dir])
# If the IDE did not set the project roots, consider anything not in the site
# packages as being in a project root (i.e.: we can calculate default values for
# site-packages but not for project roots).
check = [
(tmpdir, True),
(site_packages, False),
(site_packages_inside_project_dir, False),
(project_dir, True),
(project_dir_inside_site_packages, False),
(os.path.join(tmpdir, '<foo>'), False),
]
for check_path, find in check:
assert files_filtering.in_project_roots(check_path) == find
sys.path.append(str(site_packages))
try:
default_library_roots = files_filtering._get_default_library_roots()
assert len(set(default_library_roots)) == len(default_library_roots), \
'Duplicated library roots found in: %s' % (default_library_roots,)
assert str(site_packages) in default_library_roots
for path in sys.path:
if os.path.exists(path) and path.endswith('site-packages'):
assert path in default_library_roots
finally:
sys.path.remove(str(site_packages))
def test_filtering(tmpdir):
from _pydevd_bundle.pydevd_filtering import FilesFiltering
from _pydevd_bundle.pydevd_filtering import ExcludeFilter
files_filtering = FilesFiltering()
site_packages = tmpdir.mkdir('site-packages')
project_dir = tmpdir.mkdir('project')
project_dir_inside_site_packages = str(site_packages.mkdir('project'))
site_packages_inside_project_dir = str(project_dir.mkdir('site-packages'))
files_filtering.set_exclude_filters([
ExcludeFilter('**/project*', True, True),
ExcludeFilter('**/bar*', False, True),
])
assert files_filtering.exclude_by_filter('/foo/project', None) is True
assert files_filtering.exclude_by_filter('/foo/unmatched', None) is None
assert files_filtering.exclude_by_filter('/foo/bar', None) is False
def test_glob_matching():
from _pydevd_bundle.pydevd_filtering import glob_matches_path
# Linux
for sep, altsep in (('\\', '/'), ('/', None)):
def build(path):
if sep == '/':
return path
else:
return ('c:' + path).replace('/', '\\')
assert glob_matches_path(build('/a'), r'*', sep, altsep)
assert not glob_matches_path(build('/a/b/c/some.py'), '/a/**/c/so?.py', sep, altsep)
assert glob_matches_path('/a/b/c', '/a/b/*')
assert not glob_matches_path('/a/b', '/*')
assert glob_matches_path('/a/b', '/*/b')
assert glob_matches_path('/a/b', '**/*')
assert not glob_matches_path('/a/b', '**/a')
assert glob_matches_path(build('/a/b/c/d'), '**/d', sep, altsep)
assert not glob_matches_path(build('/a/b/c/d'), '**/c', sep, altsep)
assert glob_matches_path(build('/a/b/c/d'), '**/c/d', sep, altsep)
assert glob_matches_path(build('/a/b/c/d'), '**/b/c/d', sep, altsep)
assert glob_matches_path(build('/a/b/c/d'), '/*/b/*/d', sep, altsep)
assert glob_matches_path(build('/a/b/c/d'), '**/c/*', sep, altsep)
assert glob_matches_path(build('/a/b/c/d'), '/a/**/c/*', sep, altsep)
assert glob_matches_path(build('/a/b/c/d.py'), '/a/**/c/*', sep, altsep)
assert glob_matches_path(build('/a/b/c/d.py'), '/a/**/c/*.py', sep, altsep)
assert glob_matches_path(build('/a/b/c/some.py'), '/a/**/c/so*.py', sep, altsep)
assert glob_matches_path(build('/a/b/c/some.py'), '/a/**/c/som?.py', sep, altsep)
assert glob_matches_path(build('/a/b/c/d'), '/**', sep, altsep)
assert glob_matches_path(build('/a/b/c/d'), '/**/d', sep, altsep)
assert glob_matches_path(build('/a/b/c/d.py'), '/**/*.py', sep, altsep)
assert glob_matches_path(build('/a/b/c/d.py'), '**/c/*.py', sep, altsep)
# Expected not to match.
assert not glob_matches_path(build('/a/b/c/d'), '/**/d.py', sep, altsep)
assert not glob_matches_path(build('/a/b/c/d.pyx'), '/a/**/c/*.py', sep, altsep)
assert not glob_matches_path(build('/a/b/c/d'), '/*/d', sep, altsep)
if sep == '/':
assert not glob_matches_path(build('/a/b/c/d'), r'**\d', sep, altsep) # Match with \ doesn't work on linux...
assert not glob_matches_path(build('/a/b/c/d'), r'c:\**\d', sep, altsep) # Match with drive doesn't work on linux...
else:
# Works in Windows.
assert glob_matches_path(build('/a/b/c/d'), r'**\d', sep, altsep)
assert glob_matches_path(build('/a/b/c/d'), r'c:\**\d', sep, altsep)
# Corner cases
assert not glob_matches_path(build('/'), r'', sep, altsep)
assert glob_matches_path(build(''), r'', sep, altsep)
assert not glob_matches_path(build(''), r'**', sep, altsep)
assert glob_matches_path(build('/'), r'**', sep, altsep)
assert glob_matches_path(build('/'), r'*', sep, altsep)
def test_rules_to_exclude_filter(tmpdir):
from _pydevd_bundle.pydevd_process_net_command_json import _convert_rules_to_exclude_filters
from _pydevd_bundle.pydevd_filtering import ExcludeFilter
from random import shuffle
dira = tmpdir.mkdir('a')
dirb = dira.mkdir('b')
fileb = dirb.join('fileb.py')
fileb2 = dirb.join('fileb2.py')
with fileb.open('w') as stream:
stream.write('')
def filename_to_server(filename):
return filename
def on_error(msg):
raise AssertionError(msg)
rules = [
{'path': str(dira), 'include': False},
{'path': str(dirb), 'include': True},
{'path': str(fileb), 'include': True},
{'path': str(fileb2), 'include': True},
{'path': '**/foo/*.py', 'include': True},
{'module': 'bar', 'include': False},
{'module': 'bar.foo', 'include': True},
]
shuffle(rules)
exclude_filters = _convert_rules_to_exclude_filters(rules, filename_to_server, on_error)
assert exclude_filters == [
ExcludeFilter(name=str(fileb2), exclude=False, is_path=True),
ExcludeFilter(name=str(fileb), exclude=False, is_path=True),
ExcludeFilter(name=str(dirb) + '/**', exclude=False, is_path=True),
ExcludeFilter(name=str(dira) + '/**', exclude=True, is_path=True),
ExcludeFilter(name='**/foo/*.py', exclude=False, is_path=True),
ExcludeFilter(name='bar.foo', exclude=False, is_path=False),
ExcludeFilter(name='bar', exclude=True, is_path=False),
]
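The ordering asserted above matters because more specific filters must be checked before the broader ones that contain them (directories get a trailing '/**', and a narrower include can then win over a wider exclude). A rough standalone sketch of that idea, with illustrative names only (the real converter may order things differently internally):

def _sketch_order_filters(filters):
    # Filters are (name, exclude, is_path) tuples; longer names are treated as
    # more specific and therefore checked first, paths before modules.
    paths = sorted((f for f in filters if f[2]), key=lambda f: -len(f[0]))
    modules = sorted((f for f in filters if not f[2]), key=lambda f: -len(f[0]))
    return paths + modules

ordered = _sketch_order_filters([
    ('bar', True, False),
    ('bar.foo', False, False),
    ('/a/**', True, True),
    ('/a/b/**', False, True),
])
assert [name for name, _exclude, _is_path in ordered] == ['/a/b/**', '/a/**', 'bar.foo', 'bar']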