Drop support for Python 2.7.

This commit is contained in:
Fabio Zadrozny 2022-03-11 10:02:17 -03:00
parent eb7b7bb2ea
commit 328d4026b1
187 changed files with 760 additions and 25414 deletions

View file

@ -2,20 +2,16 @@
License: Apache 2.0
Author: Yuli Fitterman
'''
# noinspection PyBroadException
import types
from _pydevd_bundle.pydevd_constants import IS_JYTHON, IS_PY3K
from _pydevd_bundle.pydevd_constants import IS_JYTHON
try:
import inspect
except:
try:
from _pydev_imps import _pydev_inspect as inspect
except:
import traceback;
import traceback;
traceback.print_exc() # Ok, no inspect available (search will not work)from _pydevd_bundle.pydevd_constants import IS_JYTHON, IS_PY3K
traceback.print_exc() # Ok, no inspect available (search will not work)
from _pydev_bundle._pydev_imports_tipper import signature_from_docstring
@ -57,7 +53,7 @@ def get_description(obj):
fn_name = None
fn_class = None
if isinstance(fob, (types.FunctionType, types.MethodType)):
spec_info = inspect.getfullargspec(fob) if IS_PY3K else inspect.getargspec(fob)
spec_info = inspect.getfullargspec(fob)
argspec = inspect.formatargspec(*spec_info)
fn_name = getattr(fob, '__name__', None)
if isinstance(obj, type) or type(obj).__name__ == 'classobj':
@ -141,6 +137,7 @@ def create_class_stub(class_name, contents):
def create_function_stub(fn_name, fn_argspec, fn_docstring, indent=0):
def shift_right(string, prefix):
return ''.join(prefix + line for line in string.splitlines(True))

View file

@ -2,13 +2,9 @@ from collections import namedtuple
from string import ascii_letters, digits
from _pydevd_bundle import pydevd_xml
from _pydevd_bundle.pydevd_constants import IS_PY2
import pydevconsole
if IS_PY2:
import __builtin__
else:
import builtins as __builtin__ # Py3
import builtins as __builtin__ # Py3
try:
import java.lang # @UnusedImport
@ -192,8 +188,6 @@ def completions_to_xml(completions):
msg = ["<xml>"]
for comp in completions:
if IS_PY2:
comp = [(x.encode('utf-8') if x.__class__ == unicode else x) for x in comp]
msg.append('<comp p0="')
msg.append(valid_xml(quote(comp[0], '/>_= \t')))
msg.append('" p1="')
@ -211,31 +205,13 @@ def completions_to_xml(completions):
identifier_start = ascii_letters + '_'
identifier_part = ascii_letters + '_' + digits
if IS_PY2:
identifier_start = identifier_start.decode('utf-8')
identifier_part = identifier_part.decode('utf-8')
identifier_start = set(identifier_start)
identifier_part = set(identifier_part)
if IS_PY2:
# There's no string.isidentifier() on py2.
def isidentifier(s):
if not s:
return False
if s[0] not in identifier_start:
return False
def isidentifier(s):
return s.isidentifier()
for c in s[1:]:
if c not in identifier_part:
return False
return True
else:
def isidentifier(s):
return s.isidentifier()
TokenAndQualifier = namedtuple('TokenAndQualifier', 'token, qualifier')

View file

@ -3,29 +3,17 @@ import os.path
import sys
from _pydev_bundle._pydev_tipper_common import do_find
from _pydevd_bundle.pydevd_constants import IS_PY2
from _pydevd_bundle.pydevd_utils import hasattr_checked, dir_checked
if IS_PY2:
from inspect import getargspec as _originalgetargspec
from inspect import getfullargspec
def getargspec(*args, **kwargs):
ret = list(_originalgetargspec(*args, **kwargs))
ret.append([])
ret.append({})
return ret
else:
from inspect import getfullargspec
def getargspec(*args, **kwargs):
arg_spec = getfullargspec(*args, **kwargs)
return arg_spec.args, arg_spec.varargs, arg_spec.varkw, arg_spec.defaults, arg_spec.kwonlyargs or [], arg_spec.kwonlydefaults or {}
def getargspec(*args, **kwargs):
arg_spec = getfullargspec(*args, **kwargs)
return arg_spec.args, arg_spec.varargs, arg_spec.varkw, arg_spec.defaults, arg_spec.kwonlyargs or [], arg_spec.kwonlydefaults or {}
try:
xrange
except:
xrange = range
xrange = range
# completion types.
TYPE_IMPORT = '0'

View file

@ -1,28 +1,19 @@
try:
import StringIO
except:
import io as StringIO
import traceback
from java.lang import StringBuffer #@UnresolvedImport
from java.lang import String #@UnresolvedImport
import java.lang #@UnresolvedImport
from io import StringIO
from java.lang import StringBuffer # @UnresolvedImport
from java.lang import String # @UnresolvedImport
import java.lang # @UnresolvedImport
import sys
from _pydev_bundle._pydev_tipper_common import do_find
from org.python.core import PyReflectedFunction # @UnresolvedImport
from org.python.core import PyReflectedFunction #@UnresolvedImport
from org.python import core # @UnresolvedImport
from org.python.core import PyClass # @UnresolvedImport
from org.python import core #@UnresolvedImport
from org.python.core import PyClass #@UnresolvedImport
xrange = range
try:
xrange
except:
xrange = range
#completion types.
# completion types.
TYPE_IMPORT = '0'
TYPE_CLASS = '1'
TYPE_FUNCTION = '2'
@ -30,6 +21,7 @@ TYPE_ATTR = '3'
TYPE_BUILTIN = '4'
TYPE_PARAM = '5'
def _imp(name):
try:
return __import__(name)
@ -41,9 +33,11 @@ def _imp(name):
s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path)
raise RuntimeError(s)
import java.util
_java_rt_file = getattr(java.util, '__file__', None)
def Find(name):
f = None
if name.startswith('__builtin__'):
@ -61,14 +55,13 @@ def Find(name):
except:
f = None
components = name.split('.')
old_comp = None
for comp in components[1:]:
try:
#this happens in the following case:
#we have mx.DateTime.mxDateTime.mxDateTime.pyd
#but after importing it, mx.DateTime.mxDateTime does shadows access to mxDateTime.pyd
# this happens in the following case:
# we have mx.DateTime.mxDateTime.mxDateTime.pyd
# but after importing it, mx.DateTime.mxDateTime does shadows access to mxDateTime.pyd
mod = getattr(mod, comp)
except AttributeError:
if old_comp != comp:
@ -82,7 +75,7 @@ def Find(name):
foundAs = foundAs + comp
old_comp = comp
if f is None and name.startswith('java.lang'):
# Hack: java.lang.__file__ is None on Jython 2.7 (whereas it pointed to rt.jar on Jython 2.5).
f = _java_rt_file
@ -94,9 +87,10 @@ def Find(name):
f = f[:-len('$py.class')] + '.py'
return f, mod, parent, foundAs
def format_param_class_name(paramClassName):
if paramClassName.startswith('<type \'') and paramClassName.endswith('\'>'):
paramClassName = paramClassName[len('<type \''): -2]
paramClassName = paramClassName[len('<type \''):-2]
if paramClassName.startswith('['):
if paramClassName == '[C':
paramClassName = 'char[]'
@ -131,10 +125,10 @@ class Info:
def __init__(self, name, **kwargs):
self.name = name
self.doc = kwargs.get('doc', None)
self.args = kwargs.get('args', ()) #tuple of strings
self.varargs = kwargs.get('varargs', None) #string
self.kwargs = kwargs.get('kwargs', None) #string
self.ret = kwargs.get('ret', None) #string
self.args = kwargs.get('args', ()) # tuple of strings
self.varargs = kwargs.get('varargs', None) # string
self.kwargs = kwargs.get('kwargs', None) # string
self.ret = kwargs.get('ret', None) # string
def basic_as_str(self):
'''@returns this class information as a string (just basic format)
@ -147,7 +141,6 @@ class Info:
(self.name, args, self.varargs, self.kwargs, self.doc)
return s
def get_as_doc(self):
s = str(self.name)
if self.doc:
@ -173,9 +166,11 @@ class Info:
return str(s)
def isclass(cls):
return isinstance(cls, core.PyClass) or type(cls) == java.lang.Class
def ismethod(func):
'''this function should return the information gathered on a function
@ -189,8 +184,8 @@ def ismethod(func):
try:
if isinstance(func, core.PyFunction):
#ok, this is from python, created by jython
#print_ ' PyFunction'
# ok, this is from python, created by jython
# print_ ' PyFunction'
def getargs(func_code):
"""Get information about the arguments accepted by a code object.
@ -225,29 +220,29 @@ def ismethod(func):
return 1, [Info(func.func_name, args=args[0], varargs=args[1], kwargs=args[2], doc=func.func_doc)]
if isinstance(func, core.PyMethod):
#this is something from java itself, and jython just wrapped it...
# this is something from java itself, and jython just wrapped it...
#things to play in func:
#['__call__', '__class__', '__cmp__', '__delattr__', '__dir__', '__doc__', '__findattr__', '__name__', '_doget', 'im_class',
#'im_func', 'im_self', 'toString']
#print_ ' PyMethod'
#that's the PyReflectedFunction... keep going to get it
# things to play in func:
# ['__call__', '__class__', '__cmp__', '__delattr__', '__dir__', '__doc__', '__findattr__', '__name__', '_doget', 'im_class',
# 'im_func', 'im_self', 'toString']
# print_ ' PyMethod'
# that's the PyReflectedFunction... keep going to get it
func = func.im_func
if isinstance(func, PyReflectedFunction):
#this is something from java itself, and jython just wrapped it...
# this is something from java itself, and jython just wrapped it...
#print_ ' PyReflectedFunction'
# print_ ' PyReflectedFunction'
infos = []
for i in xrange(len(func.argslist)):
#things to play in func.argslist[i]:
# things to play in func.argslist[i]:
#'PyArgsCall', 'PyArgsKeywordsCall', 'REPLACE', 'StandardCall', 'args', 'compare', 'compareTo', 'data', 'declaringClass'
#'flags', 'isStatic', 'matches', 'precedence']
# 'PyArgsCall', 'PyArgsKeywordsCall', 'REPLACE', 'StandardCall', 'args', 'compare', 'compareTo', 'data', 'declaringClass'
# 'flags', 'isStatic', 'matches', 'precedence']
#print_ ' ', func.argslist[i].data.__class__
#func.argslist[i].data.__class__ == java.lang.reflect.Method
# print_ ' ', func.argslist[i].data.__class__
# func.argslist[i].data.__class__ == java.lang.reflect.Method
if func.argslist[i]:
met = func.argslist[i].data
@ -268,36 +263,36 @@ def ismethod(func):
paramClassName = paramTypesClass.getName(paramTypesClass)
except AttributeError:
try:
paramClassName = repr(paramTypesClass) #should be something like <type 'object'>
paramClassName = repr(paramTypesClass) # should be something like <type 'object'>
paramClassName = paramClassName.split('\'')[1]
except:
paramClassName = repr(paramTypesClass) #just in case something else happens... it will at least be visible
#if the parameter equals [C, it means it it a char array, so, let's change it
paramClassName = repr(paramTypesClass) # just in case something else happens... it will at least be visible
# if the parameter equals [C, it means it it a char array, so, let's change it
a = format_param_class_name(paramClassName)
#a = a.replace('[]','Array')
#a = a.replace('Object', 'obj')
#a = a.replace('String', 's')
#a = a.replace('Integer', 'i')
#a = a.replace('Char', 'c')
#a = a.replace('Double', 'd')
args.append(a) #so we don't leave invalid code
# a = a.replace('[]','Array')
# a = a.replace('Object', 'obj')
# a = a.replace('String', 's')
# a = a.replace('Integer', 'i')
# a = a.replace('Char', 'c')
# a = a.replace('Double', 'd')
args.append(a) # so we don't leave invalid code
info = Info(name, args=args, ret=ret)
#print_ info.basic_as_str()
# print_ info.basic_as_str()
infos.append(info)
return 1, infos
except Exception:
s = StringIO.StringIO()
s = StringIO()
traceback.print_exc(file=s)
return 1, [Info(str('ERROR'), doc=s.getvalue())]
return 0, None
def ismodule(mod):
#java modules... do we have other way to know that?
# java modules... do we have other way to know that?
if not hasattr(mod, 'getClass') and not hasattr(mod, '__class__') \
and hasattr(mod, '__name__'):
return 1
@ -312,20 +307,20 @@ def dir_obj(obj):
if hasattr(obj, '__class__'):
if obj.__class__ == java.lang.Class:
#get info about superclasses
# get info about superclasses
classes = []
classes.append(obj)
try:
c = obj.getSuperclass()
except TypeError:
#may happen on jython when getting the java.lang.Class class
# may happen on jython when getting the java.lang.Class class
c = obj.getSuperclass(obj)
while c != None:
classes.append(c)
c = c.getSuperclass()
#get info about interfaces
# get info about interfaces
interfs = []
for obj in classes:
try:
@ -334,7 +329,7 @@ def dir_obj(obj):
interfs.extend(obj.getInterfaces(obj))
classes.extend(interfs)
#now is the time when we actually get info on the declared methods and fields
# now is the time when we actually get info on the declared methods and fields
for obj in classes:
try:
declaredMethods = obj.getDeclaredMethods()
@ -356,17 +351,15 @@ def dir_obj(obj):
ret.append(name)
found.put(name, 1)
elif isclass(obj.__class__):
d = dir(obj.__class__)
for name in d:
ret.append(name)
found.put(name, 1)
#this simple dir does not always get all the info, that's why we have the part before
#(e.g.: if we do a dir on String, some methods that are from other interfaces such as
#charAt don't appear)
# this simple dir does not always get all the info, that's why we have the part before
# (e.g.: if we do a dir on String, some methods that are from other interfaces such as
# charAt don't appear)
d = dir(original)
for name in d:
if found.get(name) != 1:
@ -393,7 +386,6 @@ def format_arg(arg):
return s
def search_definition(data):
'''@return file, line, col
'''
@ -437,36 +429,36 @@ def generate_imports_tip_for_module(obj_to_complete, dir_comps=None, getattr=get
try:
obj = getattr(obj_to_complete, d)
except (AttributeError, java.lang.NoClassDefFoundError):
#jython has a bug in its custom classloader that prevents some things from working correctly, so, let's see if
#we can fix that... (maybe fixing it in jython itself would be a better idea, as this is clearly a bug)
#for that we need a custom classloader... we have references from it in the below places:
# jython has a bug in its custom classloader that prevents some things from working correctly, so, let's see if
# we can fix that... (maybe fixing it in jython itself would be a better idea, as this is clearly a bug)
# for that we need a custom classloader... we have references from it in the below places:
#
#http://mindprod.com/jgloss/classloader.html
#http://www.javaworld.com/javaworld/jw-03-2000/jw-03-classload-p2.html
#http://freshmeat.net/articles/view/1643/
# http://mindprod.com/jgloss/classloader.html
# http://www.javaworld.com/javaworld/jw-03-2000/jw-03-classload-p2.html
# http://freshmeat.net/articles/view/1643/
#
#note: this only happens when we add things to the sys.path at runtime, if they are added to the classpath
#before the run, everything goes fine.
# note: this only happens when we add things to the sys.path at runtime, if they are added to the classpath
# before the run, everything goes fine.
#
#The code below ilustrates what I mean...
# The code below ilustrates what I mean...
#
#import sys
#sys.path.insert(1, r"C:\bin\eclipse310\plugins\org.junit_3.8.1\junit.jar" )
# import sys
# sys.path.insert(1, r"C:\bin\eclipse310\plugins\org.junit_3.8.1\junit.jar" )
#
#import junit.framework
#print_ dir(junit.framework) #shows the TestCase class here
# import junit.framework
# print_ dir(junit.framework) #shows the TestCase class here
#
#import junit.framework.TestCase
# import junit.framework.TestCase
#
#raises the error:
#Traceback (innermost last):
# raises the error:
# Traceback (innermost last):
# File "<console>", line 1, in ?
#ImportError: No module named TestCase
# ImportError: No module named TestCase
#
#whereas if we had added the jar to the classpath before, everything would be fine by now...
# whereas if we had added the jar to the classpath before, everything would be fine by now...
ret.append((d, '', '', retType))
#that's ok, private things cannot be gotten...
# that's ok, private things cannot be gotten...
continue
else:
@ -494,10 +486,9 @@ def generate_imports_tip_for_module(obj_to_complete, dir_comps=None, getattr=get
elif ismodule(obj):
retType = TYPE_IMPORT
#add token and doc to return - assure only strings.
# add token and doc to return - assure only strings.
ret.append((d, doc, args, retType))
return ret

View file

@ -1,28 +1,24 @@
import traceback
import sys
try:
import StringIO
except:
import io as StringIO #Python 3.0
from io import StringIO
class Log:
def __init__(self):
self._contents = []
def add_content(self, *content):
self._contents.append(' '.join(content))
def add_exception(self):
s = StringIO.StringIO()
s = StringIO()
exc_info = sys.exc_info()
traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], limit=None, file=s)
self._contents.append(s.getvalue())
def get_contents(self):
return '\n'.join(self._contents)
def clear_log(self):
del self._contents[:]
del self._contents[:]

View file

@ -1,22 +1,9 @@
try:
import inspect
except:
try:
from _pydev_imps import _pydev_inspect as inspect
except:
import traceback;traceback.print_exc() #Ok, no inspect available (search will not work)
try:
import re
except:
try:
import sre as re # for older versions
except:
import traceback;traceback.print_exc() #Ok, no inspect available (search will not work)
import inspect
import re
from _pydevd_bundle.pydevd_constants import xrange
def do_find(f, mod):
import linecache
if inspect.ismodule(mod):
@ -40,7 +27,7 @@ def do_find(f, mod):
try:
mod = mod.func_code
except AttributeError:
mod = mod.__code__ #python 3k
mod = mod.__code__ # python 3k
if inspect.istraceback(mod):
mod = mod.tb_frame

View file

@ -5,19 +5,13 @@ from _pydev_bundle.pydev_imports import xmlrpclib, _queue, Exec
from _pydev_bundle._pydev_calltip_util import get_description
from _pydevd_bundle import pydevd_vars
from _pydevd_bundle import pydevd_xml
from _pydevd_bundle.pydevd_constants import (IS_JYTHON, dict_iter_items, NEXT_VALUE_SEPARATOR, get_global_debugger,
from _pydevd_bundle.pydevd_constants import (IS_JYTHON, NEXT_VALUE_SEPARATOR, get_global_debugger,
silence_warnings_decorator)
from contextlib import contextmanager
from _pydev_bundle import pydev_log
from _pydevd_bundle.pydevd_utils import interrupt_main_thread
try:
import cStringIO as StringIO # may not always be available @UnusedImport
except:
try:
import StringIO # @Reimport
except:
import io as StringIO
from io import StringIO
# =======================================================================================================================
@ -440,7 +434,7 @@ class BaseInterpreterInterface:
return True
def getFrame(self):
xml = StringIO.StringIO()
xml = StringIO()
hidden_ns = self.get_ipython_hidden_vars_dict()
xml.write("<xml>")
xml.write(pydevd_xml.frame_vars_to_xml(self.get_namespace(), hidden_ns))
@ -450,13 +444,13 @@ class BaseInterpreterInterface:
@silence_warnings_decorator
def getVariable(self, attributes):
xml = StringIO.StringIO()
xml = StringIO()
xml.write("<xml>")
val_dict = pydevd_vars.resolve_compound_var_object_fields(self.get_namespace(), attributes)
if val_dict is None:
val_dict = {}
for k, val in dict_iter_items(val_dict):
for k, val in val_dict.items():
val = val_dict[k]
evaluate_full_value = pydevd_xml.should_evaluate_full_value(val)
xml.write(pydevd_vars.var_to_xml(val, k, evaluate_full_value=evaluate_full_value))
@ -471,7 +465,7 @@ class BaseInterpreterInterface:
return pydevd_vars.table_like_struct_to_xml(array, name, roffset, coffset, rows, cols, format)
def evaluate(self, expression):
xml = StringIO.StringIO()
xml = StringIO()
xml.write("<xml>")
result = pydevd_vars.eval_in_context(expression, self.get_namespace(), self.get_namespace())
xml.write(pydevd_vars.var_to_xml(result, expression))
@ -536,7 +530,7 @@ class BaseInterpreterInterface:
debugger_options = {}
env_key = "PYDEVD_EXTRA_ENVS"
if env_key in debugger_options:
for (env_name, value) in dict_iter_items(debugger_options[env_key]):
for (env_name, value) in debugger_options[env_key].items():
existing_value = os.environ.get(env_name, None)
if existing_value:
os.environ[env_name] = "%s%c%s" % (existing_value, os.path.pathsep, value)

View file

@ -1,47 +1,26 @@
from _pydevd_bundle.pydevd_constants import USE_LIB_COPY, izip
try:
try:
if USE_LIB_COPY:
from _pydev_imps._pydev_saved_modules import xmlrpclib
else:
import xmlrpclib
except ImportError:
import xmlrpc.client as xmlrpclib
if USE_LIB_COPY:
from _pydev_imps._pydev_saved_modules import xmlrpclib
else:
import xmlrpclib
except ImportError:
from _pydev_imps import _pydev_xmlrpclib as xmlrpclib
import xmlrpc.client as xmlrpclib
try:
try:
if USE_LIB_COPY:
from _pydev_imps._pydev_saved_modules import _pydev_SimpleXMLRPCServer
from _pydev_SimpleXMLRPCServer import SimpleXMLRPCServer
else:
from SimpleXMLRPCServer import SimpleXMLRPCServer
except ImportError:
from xmlrpc.server import SimpleXMLRPCServer
except ImportError:
from _pydev_imps._pydev_SimpleXMLRPCServer import SimpleXMLRPCServer
if USE_LIB_COPY:
from _pydev_imps._pydev_saved_modules import xmlrpcserver
SimpleXMLRPCServer = xmlrpcserver.SimpleXMLRPCServer
else:
from xmlrpc.server import SimpleXMLRPCServer
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from io import StringIO
try:
execfile = execfile # Not in Py3k
except NameError:
from _pydev_imps._pydev_execfile import execfile
from _pydev_imps._pydev_execfile import execfile
from _pydev_imps._pydev_saved_modules import _queue
try:
from _pydevd_bundle.pydevd_exec import Exec
except:
from _pydevd_bundle.pydevd_exec2 import Exec
from _pydevd_bundle.pydevd_exec2 import Exec
try:
from urllib import quote, quote_plus, unquote_plus
except:
from urllib.parse import quote, quote_plus, unquote_plus # @UnresolvedImport
from urllib.parse import quote, quote_plus, unquote_plus # @UnresolvedImport

View file

@ -1,14 +1,13 @@
import sys
from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface
import os
import traceback
# Uncomment to force PyDev standard shell.
# raise ImportError()
from _pydev_bundle.pydev_ipython_console_011 import get_pydev_frontend
from _pydevd_bundle.pydevd_constants import dict_iter_items
#=======================================================================================================================
# InterpreterInterface
@ -48,11 +47,9 @@ class InterpreterInterface(BaseInterpreterInterface):
return res
def get_namespace(self):
return self.interpreter.get_namespace()
def getCompletions(self, text, act_tok):
return self.interpreter.getCompletions(text, act_tok)
@ -61,8 +58,8 @@ class InterpreterInterface(BaseInterpreterInterface):
def notify_about_magic(self):
if not self.notification_succeeded:
self.notification_tries+=1
if self.notification_tries>self.notification_max_tries:
self.notification_tries += 1
if self.notification_tries > self.notification_max_tries:
return
completions = self.getCompletions("%", "%")
magic_commands = [x[0] for x in completions]
@ -73,7 +70,7 @@ class InterpreterInterface(BaseInterpreterInterface):
try:
server.NotifyAboutMagic(magic_commands, self.interpreter.is_automagic())
self.notification_succeeded = True
except :
except:
self.notification_succeeded = False
def get_ipython_hidden_vars_dict(self):
@ -85,7 +82,7 @@ class InterpreterInterface(BaseInterpreterInterface):
user_hidden_dict = user_ns_hidden.copy()
else:
# In IPython 1.x `user_ns_hidden` used to be a set with names of hidden variables
user_hidden_dict = dict([(key, val) for key, val in dict_iter_items(self.interpreter.ipython.user_ns)
user_hidden_dict = dict([(key, val) for key, val in self.interpreter.ipython.user_ns.items()
if key in user_ns_hidden])
# while `_`, `__` and `___` were not initialized, they are not presented in `user_ns_hidden`

View file

@ -34,7 +34,6 @@ except ImportError:
from IPython.core import release
from _pydev_bundle.pydev_imports import xmlrpclib
from _pydevd_bundle.pydevd_constants import dict_keys
default_pydev_banner_parts = default_banner_parts
@ -364,9 +363,9 @@ class _PyDevFrontEnd:
def update(self, globals, locals):
ns = self.ipython.user_ns
for key in dict_keys(self.ipython.user_ns):
for key, value in list(ns.items()):
if key not in locals:
locals[key] = ns[key]
locals[key] = value
self.ipython.user_global_ns.clear()
self.ipython.user_global_ns.update(globals)

View file

@ -1,10 +1,11 @@
from _pydevd_bundle import pydevd_constants
from _pydev_imps._pydev_saved_modules import socket
import sys
IS_JYTHON = sys.platform.find('java') != -1
_cache = None
def get_localhost():
'''
Should return 127.0.0.1 in ipv4 and ::1 in ipv6
@ -48,17 +49,19 @@ def get_socket_names(n_sockets, close=False):
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((get_localhost(), 0))
socket_name = sock.getsockname()
sockets.append(sock)
socket_names.append(socket_name)
if close:
for s in sockets:
s.close()
return socket_names
def get_socket_name(close=False):
return get_socket_names(1, close)[0]
if __name__ == '__main__':
print(get_socket_name())
print(get_socket_name())

View file

@ -4,7 +4,7 @@ import re
import sys
from _pydev_imps._pydev_saved_modules import threading
from _pydevd_bundle.pydevd_constants import get_global_debugger, IS_WINDOWS, IS_JYTHON, get_current_thread_id, \
sorted_dict_repr, IS_PY2
sorted_dict_repr
from _pydev_bundle import pydev_log
from contextlib import contextmanager
from _pydevd_bundle import pydevd_constants
@ -278,9 +278,6 @@ def remove_quotes_from_args(args):
for x in args:
if Path is not None and isinstance(x, Path):
x = str(x)
elif IS_PY2:
if not isinstance(x, (str, unicode)):
raise InvalidTypeInArgsException(str(type(x)))
else:
if not isinstance(x, (bytes, str)):
raise InvalidTypeInArgsException(str(type(x)))
@ -298,9 +295,6 @@ def remove_quotes_from_args(args):
for x in args:
if Path is not None and isinstance(x, Path):
x = x.as_posix()
elif IS_PY2:
if not isinstance(x, (str, unicode)):
raise InvalidTypeInArgsException(str(type(x)))
else:
if not isinstance(x, (bytes, str)):
raise InvalidTypeInArgsException(str(type(x)))

View file

@ -1,604 +0,0 @@
"""HTTP server base class.
Note: the class in this module doesn't implement any HTTP request; see
SimpleHTTPServer for simple implementations of GET, HEAD and POST
(including CGI scripts). It does, however, optionally implement HTTP/1.1
persistent connections, as of version 0.3.
Contents:
- BaseHTTPRequestHandler: HTTP request handler base class
- test: test function
XXX To do:
- log requests even later (to capture byte count)
- log user-agent header and other interesting goodies
- send error log to separate file
"""
# See also:
#
# HTTP Working Group T. Berners-Lee
# INTERNET-DRAFT R. T. Fielding
# <draft-ietf-http-v10-spec-00.txt> H. Frystyk Nielsen
# Expires September 8, 1995 March 8, 1995
#
# URL: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt
#
# and
#
# Network Working Group R. Fielding
# Request for Comments: 2616 et al
# Obsoletes: 2068 June 1999
# Category: Standards Track
#
# URL: http://www.faqs.org/rfcs/rfc2616.html
# Log files
# ---------
#
# Here's a quote from the NCSA httpd docs about log file format.
#
# | The logfile format is as follows. Each line consists of:
# |
# | host rfc931 authuser [DD/Mon/YYYY:hh:mm:ss] "request" ddd bbbb
# |
# | host: Either the DNS name or the IP number of the remote client
# | rfc931: Any information returned by identd for this person,
# | - otherwise.
# | authuser: If user sent a userid for authentication, the user name,
# | - otherwise.
# | DD: Day
# | Mon: Month (calendar name)
# | YYYY: Year
# | hh: hour (24-hour format, the machine's timezone)
# | mm: minutes
# | ss: seconds
# | request: The first line of the HTTP request as sent by the client.
# | ddd: the status code returned by the server, - if not available.
# | bbbb: the total number of bytes sent,
# | *not including the HTTP/1.0 header*, - if not available
# |
# | You can determine the name of the file accessed through request.
#
# (Actually, the latter is only true if you know the server configuration
# at the time the request was made!)
__version__ = "0.3"
__all__ = ["HTTPServer", "BaseHTTPRequestHandler"]
import sys
from _pydev_imps._pydev_saved_modules import time
from _pydev_imps._pydev_saved_modules import socket
from warnings import filterwarnings, catch_warnings
with catch_warnings():
if sys.py3kwarning:
filterwarnings("ignore", ".*mimetools has been removed",
DeprecationWarning)
import mimetools
from _pydev_imps import _pydev_SocketServer as SocketServer
# Default error message template
DEFAULT_ERROR_MESSAGE = """\
<head>
<title>Error response</title>
</head>
<body>
<h1>Error response</h1>
<p>Error code %(code)d.
<p>Message: %(message)s.
<p>Error code explanation: %(code)s = %(explain)s.
</body>
"""
DEFAULT_ERROR_CONTENT_TYPE = "text/html"
def _quote_html(html):
return html.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
class HTTPServer(SocketServer.TCPServer):
allow_reuse_address = 1 # Seems to make sense in testing environment
def server_bind(self):
"""Override server_bind to store the server name."""
SocketServer.TCPServer.server_bind(self)
host, port = self.socket.getsockname()[:2]
self.server_name = socket.getfqdn(host)
self.server_port = port
class BaseHTTPRequestHandler(SocketServer.StreamRequestHandler):
"""HTTP request handler base class.
The following explanation of HTTP serves to guide you through the
code as well as to expose any misunderstandings I may have about
HTTP (so you don't need to read the code to figure out I'm wrong
:-).
HTTP (HyperText Transfer Protocol) is an extensible protocol on
top of a reliable stream transport (e.g. TCP/IP). The protocol
recognizes three parts to a request:
1. One line identifying the request type and path
2. An optional set of RFC-822-style headers
3. An optional data part
The headers and data are separated by a blank line.
The first line of the request has the form
<command> <path> <version>
where <command> is a (case-sensitive) keyword such as GET or POST,
<path> is a string containing path information for the request,
and <version> should be the string "HTTP/1.0" or "HTTP/1.1".
<path> is encoded using the URL encoding scheme (using %xx to signify
the ASCII character with hex code xx).
The specification specifies that lines are separated by CRLF but
for compatibility with the widest range of clients recommends
servers also handle LF. Similarly, whitespace in the request line
is treated sensibly (allowing multiple spaces between components
and allowing trailing whitespace).
Similarly, for output, lines ought to be separated by CRLF pairs
but most clients grok LF characters just fine.
If the first line of the request has the form
<command> <path>
(i.e. <version> is left out) then this is assumed to be an HTTP
0.9 request; this form has no optional headers and data part and
the reply consists of just the data.
The reply form of the HTTP 1.x protocol again has three parts:
1. One line giving the response code
2. An optional set of RFC-822-style headers
3. The data
Again, the headers and data are separated by a blank line.
The response code line has the form
<version> <responsecode> <responsestring>
where <version> is the protocol version ("HTTP/1.0" or "HTTP/1.1"),
<responsecode> is a 3-digit response code indicating success or
failure of the request, and <responsestring> is an optional
human-readable string explaining what the response code means.
This server parses the request and the headers, and then calls a
function specific to the request type (<command>). Specifically,
a request SPAM will be handled by a method do_SPAM(). If no
such method exists the server sends an error response to the
client. If it exists, it is called with no arguments:
do_SPAM()
Note that the request name is case sensitive (i.e. SPAM and spam
are different requests).
The various request details are stored in instance variables:
- client_address is the client IP address in the form (host,
port);
- command, path and version are the broken-down request line;
- headers is an instance of mimetools.Message (or a derived
class) containing the header information;
- rfile is a file object open for reading positioned at the
start of the optional input data part;
- wfile is a file object open for writing.
IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING!
The first thing to be written must be the response line. Then
follow 0 or more header lines, then a blank line, and then the
actual data (if any). The meaning of the header lines depends on
the command executed by the server; in most cases, when data is
returned, there should be at least one header line of the form
Content-type: <type>/<subtype>
where <type> and <subtype> should be registered MIME types,
e.g. "text/html" or "text/plain".
"""
# The Python system version, truncated to its first component.
sys_version = "Python/" + sys.version.split()[0]
# The server software version. You may want to override this.
# The format is multiple whitespace-separated strings,
# where each string is of the form name[/version].
server_version = "BaseHTTP/" + __version__
# The default request version. This only affects responses up until
# the point where the request line is parsed, so it mainly decides what
# the client gets back when sending a malformed request line.
# Most web servers default to HTTP 0.9, i.e. don't send a status line.
default_request_version = "HTTP/0.9"
def parse_request(self):
    """Parse a request (internal).

    The request should be stored in self.raw_requestline; the results
    are in self.command, self.path, self.request_version and
    self.headers.

    Return True for success, False for failure; on failure, an
    error is sent back.
    """
    self.command = None  # set in case of error on the first line
    self.request_version = version = self.default_request_version
    # Default to closing the connection; persistent connections are only
    # enabled below for well-formed HTTP/1.1 requests.
    self.close_connection = 1
    requestline = self.raw_requestline
    requestline = requestline.rstrip('\r\n')
    self.requestline = requestline
    words = requestline.split()
    if len(words) == 3:
        # Full request line: "<command> <path> HTTP/<major>.<minor>"
        command, path, version = words
        if version[:5] != 'HTTP/':
            self.send_error(400, "Bad request version (%r)" % version)
            return False
        try:
            base_version_number = version.split('/', 1)[1]
            version_number = base_version_number.split(".")
            # RFC 2145 section 3.1 says there can be only one "." and
            #   - major and minor numbers MUST be treated as
            #     separate integers;
            #   - HTTP/2.4 is a lower version than HTTP/2.13, which in
            #     turn is lower than HTTP/12.3;
            #   - Leading zeros MUST be ignored by recipients.
            if len(version_number) != 2:
                raise ValueError
            version_number = int(version_number[0]), int(version_number[1])
        except (ValueError, IndexError):
            self.send_error(400, "Bad request version (%r)" % version)
            return False
        if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1":
            # HTTP/1.1 connections are persistent by default.
            self.close_connection = 0
        if version_number >= (2, 0):
            self.send_error(505,
                            "Invalid HTTP Version (%s)" % base_version_number)
            return False
    elif len(words) == 2:
        # HTTP/0.9 style request line: only "GET <path>" is legal.
        command, path = words
        self.close_connection = 1
        if command != 'GET':
            self.send_error(400,
                            "Bad HTTP/0.9 request type (%r)" % command)
            return False
    elif not words:
        # Blank request line: no error response, caller just gives up.
        return False
    else:
        self.send_error(400, "Bad request syntax (%r)" % requestline)
        return False
    self.command, self.path, self.request_version = command, path, version

    # Examine the headers and look for a Connection directive
    # (MessageClass is mimetools.Message in this Python 2 copy).
    self.headers = self.MessageClass(self.rfile, 0)

    conntype = self.headers.get('Connection', "")
    if conntype.lower() == 'close':
        self.close_connection = 1
    elif (conntype.lower() == 'keep-alive' and
          self.protocol_version >= "HTTP/1.1"):
        self.close_connection = 0
    return True
def handle_one_request(self):
    """Handle a single HTTP request.

    You normally don't need to override this method; see the class
    __doc__ string for information on how to handle specific HTTP
    commands such as GET and POST.
    """
    try:
        # 65537 = 64KiB + 1: reading one byte past the limit lets us
        # detect an over-long request line without unbounded reads.
        self.raw_requestline = self.rfile.readline(65537)
        if len(self.raw_requestline) > 65536:
            self.requestline = ''
            self.request_version = ''
            self.command = ''
            self.send_error(414)
            return
        if not self.raw_requestline:
            # EOF: peer closed the connection.
            self.close_connection = 1
            return
        if not self.parse_request():
            # An error code has been sent, just exit
            return
        mname = 'do_' + self.command
        if not hasattr(self, mname):
            self.send_error(501, "Unsupported method (%r)" % self.command)
            return
        method = getattr(self, mname)
        method()
        self.wfile.flush()  # actually send the response if not already done.
    except socket.timeout:
        # a read or a write timed out.  Discard this connection
        self.log_error("Request timed out: %r", sys.exc_info()[1])
        self.close_connection = 1
        return
def handle(self):
    """Serve requests on this connection until it must be closed."""
    # Start pessimistic: a single request, unless request parsing
    # flips close_connection off (keep-alive).
    self.close_connection = 1
    while True:
        self.handle_one_request()
        if self.close_connection:
            break
def send_error(self, code, message=None):
    """Send and log an error reply.

    Arguments are the error code, and a detailed message.
    The detailed message defaults to the short entry matching the
    response code.

    This sends an error response (so it must be called before any
    output has been generated), logs the error, and finally sends
    a piece of HTML explaining the error to the user.
    """
    try:
        short, long = self.responses[code]
    except KeyError:
        # Unknown code: still answer, with placeholder texts.
        short, long = '???', '???'
    if message is None:
        message = short
    explain = long
    self.log_error("code %d, message %s", code, message)
    # using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201)
    content = (self.error_message_format %
               {'code': code, 'message': _quote_html(message), 'explain': explain})
    self.send_response(code, message)
    self.send_header("Content-Type", self.error_content_type)
    self.send_header('Connection', 'close')
    self.end_headers()
    # No body for HEAD requests or for codes that forbid one (204, 304).
    if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
        self.wfile.write(content)
# HTML template and content type used by send_error() replies.
error_message_format = DEFAULT_ERROR_MESSAGE
error_content_type = DEFAULT_ERROR_CONTENT_TYPE
def send_response(self, code, message=None):
    """Send the response header and log the response code.

    Also send two standard headers with the server software
    version and the current date.
    """
    self.log_request(code)
    if message is None:
        # Default to the short message from the responses table.
        if code in self.responses:
            message = self.responses[code][0]
        else:
            message = ''
    if self.request_version != 'HTTP/0.9':
        # HTTP/0.9 responses carry no status line (or headers).
        self.wfile.write("%s %d %s\r\n" %
                         (self.protocol_version, code, message))
        # print (self.protocol_version, code, message)
    self.send_header('Server', self.version_string())
    self.send_header('Date', self.date_time_string())
def send_header(self, keyword, value):
    """Emit one MIME header line; track Connection keep-alive state."""
    if self.request_version != 'HTTP/0.9':
        # HTTP/0.9 responses have no header section at all.
        self.wfile.write("%s: %s\r\n" % (keyword, value))

    # A Connection header also updates the connection-reuse flag.
    if keyword.lower() == 'connection':
        directive = value.lower()
        if directive == 'close':
            self.close_connection = 1
        elif directive == 'keep-alive':
            self.close_connection = 0
def end_headers(self):
    """Send the blank line ending the MIME headers."""
    if self.request_version == 'HTTP/0.9':
        # No header section exists in HTTP/0.9, so nothing to close.
        return
    self.wfile.write("\r\n")
def log_request(self, code='-', size='-'):
    """Log an accepted request.

    This is called by send_response().  *code* is the numeric status
    and *size* the response size; both default to '-' when unknown.
    """
    self.log_message('"%s" %s %s',
                     self.requestline, str(code), str(size))
def log_error(self, format, *args):
    """Log an error.

    This is called when a request cannot be fulfilled.  By
    default it passes the message on to log_message().

    Arguments are the same as for log_message().

    XXX This should go to the separate error log.
    """
    self.log_message(format, *args)
def log_message(self, format, *args):
    """Log an arbitrary message.

    This is used by all other logging functions.  Override
    it if you have specific logging wishes.

    The first argument, FORMAT, is a format string for the
    message to be logged.  If the format string contains
    any % escapes requiring parameters, they should be
    specified as subsequent arguments (it's just like
    printf!).

    The client host and current date/time are prefixed to
    every message.
    """
    # Writes straight to stderr (this predates use of the logging module).
    sys.stderr.write("%s - - [%s] %s\n" %
                     (self.address_string(),
                      self.log_date_time_string(),
                      format % args))
def version_string(self):
    """Return the Server header value: software then Python version."""
    return ' '.join((self.server_version, self.sys_version))
def date_time_string(self, timestamp=None):
    """Format *timestamp* (default: now) as an RFC 1123 HTTP date."""
    if timestamp is None:
        timestamp = time.time()
    # Always render in GMT, as HTTP requires.
    year, month, day, hh, mm, ss, wd = time.gmtime(timestamp)[:7]
    stamp = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
        self.weekdayname[wd],
        day, self.monthname[month], year,
        hh, mm, ss)
    return stamp
def log_date_time_string(self):
    """Return the current local time formatted for log lines."""
    # Local (not GMT) time, matching common access-log conventions.
    year, month, day, hh, mm, ss = time.localtime(time.time())[:6]
    return "%02d/%3s/%04d %02d:%02d:%02d" % (
        day, self.monthname[month], year, hh, mm, ss)
# Name tables used by date_time_string(); monthname[0] is a placeholder
# so that 1-based month numbers from time.gmtime() index directly.
weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']

monthname = [None,
             'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
             'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
def address_string(self):
    """Return the client address formatted for logging.

    Performs a reverse lookup via socket.getfqdn(), so it may block
    on DNS.
    """
    host, _port = self.client_address[:2]
    return socket.getfqdn(host)
# Essentially static class variables

# The version of the HTTP protocol we support.
# Set this to "HTTP/1.1" to enable automatic keepalive.
protocol_version = "HTTP/1.0"

# The Message-like class used to parse headers (Python 2 mimetools).
MessageClass = mimetools.Message
# Table mapping response codes to messages; entries have the
# form {code: (shortmessage, longmessage)}.
# See RFC 2616.  Used by send_response() / send_error() to pick the
# default status text and the explanatory body.
responses = {
    100: ('Continue', 'Request received, please continue'),
    101: ('Switching Protocols',
          'Switching to new protocol; obey Upgrade header'),

    200: ('OK', 'Request fulfilled, document follows'),
    201: ('Created', 'Document created, URL follows'),
    202: ('Accepted',
          'Request accepted, processing continues off-line'),
    203: ('Non-Authoritative Information', 'Request fulfilled from cache'),
    204: ('No Content', 'Request fulfilled, nothing follows'),
    205: ('Reset Content', 'Clear input form for further input.'),
    206: ('Partial Content', 'Partial content follows.'),

    300: ('Multiple Choices',
          'Object has several resources -- see URI list'),
    301: ('Moved Permanently', 'Object moved permanently -- see URI list'),
    302: ('Found', 'Object moved temporarily -- see URI list'),
    303: ('See Other', 'Object moved -- see Method and URL list'),
    304: ('Not Modified',
          'Document has not changed since given time'),
    305: ('Use Proxy',
          'You must use proxy specified in Location to access this '
          'resource.'),
    307: ('Temporary Redirect',
          'Object moved temporarily -- see URI list'),

    400: ('Bad Request',
          'Bad request syntax or unsupported method'),
    401: ('Unauthorized',
          'No permission -- see authorization schemes'),
    402: ('Payment Required',
          'No payment -- see charging schemes'),
    403: ('Forbidden',
          'Request forbidden -- authorization will not help'),
    404: ('Not Found', 'Nothing matches the given URI'),
    405: ('Method Not Allowed',
          'Specified method is invalid for this resource.'),
    406: ('Not Acceptable', 'URI not available in preferred format.'),
    407: ('Proxy Authentication Required', 'You must authenticate with '
          'this proxy before proceeding.'),
    408: ('Request Timeout', 'Request timed out; try again later.'),
    409: ('Conflict', 'Request conflict.'),
    410: ('Gone',
          'URI no longer exists and has been permanently removed.'),
    411: ('Length Required', 'Client must specify Content-Length.'),
    412: ('Precondition Failed', 'Precondition in headers is false.'),
    413: ('Request Entity Too Large', 'Entity is too large.'),
    414: ('Request-URI Too Long', 'URI is too long.'),
    415: ('Unsupported Media Type', 'Entity body in unsupported format.'),
    416: ('Requested Range Not Satisfiable',
          'Cannot satisfy request range.'),
    417: ('Expectation Failed',
          'Expect condition could not be satisfied.'),

    500: ('Internal Server Error', 'Server got itself in trouble'),
    501: ('Not Implemented',
          'Server does not support this operation'),
    502: ('Bad Gateway', 'Invalid responses from another server/proxy.'),
    503: ('Service Unavailable',
          'The server cannot process the request due to a high load'),
    504: ('Gateway Timeout',
          'The gateway server did not receive a timely response'),
    505: ('HTTP Version Not Supported', 'Cannot fulfill request.'),
    }
def test(HandlerClass=BaseHTTPRequestHandler,
         ServerClass=HTTPServer, protocol="HTTP/1.0"):
    """Test the HTTP request handler class.

    This runs an HTTP server on port 8000 (or the first command line
    argument) and serves until interrupted.
    """
    if sys.argv[1:]:
        port = int(sys.argv[1])
    else:
        port = 8000
    server_address = ('', port)

    HandlerClass.protocol_version = protocol
    httpd = ServerClass(server_address, HandlerClass)

    sa = httpd.socket.getsockname()
    # Bug fix: under Python 2 (this module's dialect -- see the mimetools
    # usage above) `print ("a", b, ...)` printed a tuple repr instead of
    # the message; write the formatted string explicitly instead.
    sys.stdout.write("Serving HTTP on %s port %s ...\n" % (sa[0], sa[1]))
    httpd.serve_forever()
# Run the demo server when executed as a script.
if __name__ == '__main__':
    test()

View file

@ -1,601 +0,0 @@
#Just a copy of the version in python 2.5 to be used if it's not available in jython 2.1
"""Simple XML-RPC Server.
This module can be used to create simple XML-RPC servers
by creating a server and either installing functions, a
class instance, or by extending the SimpleXMLRPCServer
class.
It can also be used to handle XML-RPC requests in a CGI
environment using CGIXMLRPCRequestHandler.
A list of possible usage patterns follows:
1. Install functions:
server = SimpleXMLRPCServer(("localhost", 8000))
server.register_function(pow)
server.register_function(lambda x,y: x+y, 'add')
server.serve_forever()
2. Install an instance:
class MyFuncs:
def __init__(self):
# make all of the string functions available through
# string.func_name
import string
self.string = string
def _listMethods(self):
# implement this method so that system.listMethods
# knows to advertise the strings methods
return list_public_methods(self) + \
['string.' + method for method in list_public_methods(self.string)]
def pow(self, x, y): return pow(x, y)
def add(self, x, y) : return x + y
server = SimpleXMLRPCServer(("localhost", 8000))
server.register_introspection_functions()
server.register_instance(MyFuncs())
server.serve_forever()
3. Install an instance with custom dispatch method:
class Math:
def _listMethods(self):
# this method must be present for system.listMethods
# to work
return ['add', 'pow']
def _methodHelp(self, method):
# this method must be present for system.methodHelp
# to work
if method == 'add':
return "add(2,3) => 5"
elif method == 'pow':
return "pow(x, y[, z]) => number"
else:
# By convention, return empty
# string if no help is available
return ""
def _dispatch(self, method, params):
if method == 'pow':
return pow(*params)
elif method == 'add':
return params[0] + params[1]
else:
raise 'bad method'
server = SimpleXMLRPCServer(("localhost", 8000))
server.register_introspection_functions()
server.register_instance(Math())
server.serve_forever()
4. Subclass SimpleXMLRPCServer:
class MathServer(SimpleXMLRPCServer):
def _dispatch(self, method, params):
try:
# We are forcing the 'export_' prefix on methods that are
# callable through XML-RPC to prevent potential security
# problems
func = getattr(self, 'export_' + method)
except AttributeError:
raise Exception('method "%s" is not supported' % method)
else:
return func(*params)
def export_add(self, x, y):
return x + y
server = MathServer(("localhost", 8000))
server.serve_forever()
5. CGI script:
server = CGIXMLRPCRequestHandler()
server.register_function(pow)
server.handle_request()
"""
# Written by Brian Quinlan (brian@sweetapp.com).
# Based on code written by Fredrik Lundh.
from _pydev_imps import _pydev_xmlrpclib as xmlrpclib
from _pydev_imps._pydev_xmlrpclib import Fault
from _pydev_imps import _pydev_SocketServer as SocketServer
from _pydev_imps import _pydev_BaseHTTPServer as BaseHTTPServer
import sys
import os
try:
import fcntl
except ImportError:
fcntl = None
def resolve_dotted_attribute(obj, attr, allow_dotted_names=True):
    """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d

    Resolves a dotted attribute name to an object.  Raises
    an AttributeError if any attribute in the chain starts with a '_'.

    If the optional allow_dotted_names argument is false, dots are not
    supported and this function operates similar to getattr(obj, attr).
    """
    if allow_dotted_names:
        parts = attr.split('.')
    else:
        parts = [attr]

    current = obj
    for name in parts:
        # Refuse to traverse into private attributes.
        if name.startswith('_'):
            raise AttributeError(
                'attempt to access private attribute "%s"' % name
                )
        current = getattr(current, name)
    return current
def list_public_methods(obj):
    """Returns a list of attribute strings, found in the specified
    object, which represent callable attributes"""
    public = []
    for name in dir(obj):
        # Skip private names and non-callable attributes.
        if name.startswith('_'):
            continue
        if callable(getattr(obj, name)):
            public.append(name)
    return public
def remove_duplicates(lst):
    """remove_duplicates([2,2,2,1,3,3]) => [3,1,2]

    Returns a copy of a list without duplicates. Every list
    item must be hashable and the order of the items in the
    resulting list is not defined.
    """
    u = {}
    for x in lst:
        u[x] = 1
    # list(u) rather than u.keys(): identical on Python 2, but also keeps
    # the documented list return type on Python 3, where dict.keys() is a
    # view (callers such as system_listMethods() call .sort() on the result).
    return list(u)
class SimpleXMLRPCDispatcher:
    """Mix-in class that dispatches XML-RPC requests.

    This class is used to register XML-RPC method handlers
    and then to dispatch them. There should never be any
    reason to instantiate this class directly.

    NOTE: this copy deliberately keeps Python 2 constructs such as
    ``except Fault, fault:`` and ``dict.has_key`` -- the module is a
    fallback for Jython 2.1 (see the module header).
    """

    def __init__(self, allow_none, encoding):
        # Maps exported method name -> callable (see register_function()).
        self.funcs = {}
        # Optional fallback object searched when no registered function
        # matches (see register_instance()).
        self.instance = None
        self.allow_none = allow_none
        self.encoding = encoding

    def register_instance(self, instance, allow_dotted_names=False):
        """Registers an instance to respond to XML-RPC requests.

        Only one instance can be installed at a time.

        If the registered instance has a _dispatch method then that
        method will be called with the name of the XML-RPC method and
        its parameters as a tuple
        e.g. instance._dispatch('add',(2,3))

        If the registered instance does not have a _dispatch method
        then the instance will be searched to find a matching method
        and, if found, will be called. Methods beginning with an '_'
        are considered private and will not be called by
        SimpleXMLRPCServer.

        If a registered function matches a XML-RPC request, then it
        will be called instead of the registered instance.

        If the optional allow_dotted_names argument is true and the
        instance does not have a _dispatch method, method names
        containing dots are supported and resolved, as long as none of
        the name segments start with an '_'.

        *** SECURITY WARNING: ***

        Enabling the allow_dotted_names options allows intruders
        to access your module's global variables and may allow
        intruders to execute arbitrary code on your machine.  Only
        use this option on a secure, closed network.
        """
        self.instance = instance
        self.allow_dotted_names = allow_dotted_names

    def register_function(self, function, name=None):
        """Registers a function to respond to XML-RPC requests.

        The optional name argument can be used to set a Unicode name
        for the function.
        """
        # Default the exported name to the function's own __name__.
        if name is None:
            name = function.__name__
        self.funcs[name] = function

    def register_introspection_functions(self):
        """Registers the XML-RPC introspection methods in the system
        namespace.

        see http://xmlrpc.usefulinc.com/doc/reserved.html
        """
        self.funcs.update({'system.listMethods' : self.system_listMethods,
                           'system.methodSignature' : self.system_methodSignature,
                           'system.methodHelp' : self.system_methodHelp})

    def register_multicall_functions(self):
        """Registers the XML-RPC multicall method in the system
        namespace.

        see http://www.xmlrpc.com/discuss/msgReader$1208"""
        self.funcs.update({'system.multicall' : self.system_multicall})

    def _marshaled_dispatch(self, data, dispatch_method=None):
        """Dispatches an XML-RPC method from marshalled (XML) data.

        XML-RPC methods are dispatched from the marshalled (XML) data
        using the _dispatch method and the result is returned as
        marshalled data. For backwards compatibility, a dispatch
        function can be provided as an argument (see comment in
        SimpleXMLRPCRequestHandler.do_POST) but overriding the
        existing method through subclassing is the prefered means
        of changing method dispatch behavior.
        """
        try:
            params, method = xmlrpclib.loads(data)

            # generate response
            if dispatch_method is not None:
                response = dispatch_method(method, params)
            else:
                response = self._dispatch(method, params)
            # wrap response in a singleton tuple
            response = (response,)
            response = xmlrpclib.dumps(response, methodresponse=1,
                                       allow_none=self.allow_none, encoding=self.encoding)
        except Fault, fault:
            # Marshal the Fault itself back to the caller.
            response = xmlrpclib.dumps(fault, allow_none=self.allow_none,
                                       encoding=self.encoding)
        except:
            # report exception back to server
            response = xmlrpclib.dumps(
                xmlrpclib.Fault(1, "%s:%s" % (sys.exc_type, sys.exc_value)), #@UndefinedVariable exc_value only available when we actually have an exception
                encoding=self.encoding, allow_none=self.allow_none,
                )

        return response

    def system_listMethods(self):
        """system.listMethods() => ['add', 'subtract', 'multiple']

        Returns a list of the methods supported by the server."""
        methods = self.funcs.keys()
        if self.instance is not None:
            # Instance can implement _listMethod to return a list of
            # methods
            if hasattr(self.instance, '_listMethods'):
                methods = remove_duplicates(
                        methods + self.instance._listMethods()
                    )
            # if the instance has a _dispatch method then we
            # don't have enough information to provide a list
            # of methods
            elif not hasattr(self.instance, '_dispatch'):
                methods = remove_duplicates(
                        methods + list_public_methods(self.instance)
                    )
        methods.sort()
        return methods

    def system_methodSignature(self, method_name):
        """system.methodSignature('add') => [double, int, int]

        Returns a list describing the signature of the method. In the
        above example, the add method takes two integers as arguments
        and returns a double result.

        This server does NOT support system.methodSignature."""
        # See http://xmlrpc.usefulinc.com/doc/sysmethodsig.html
        return 'signatures not supported'

    def system_methodHelp(self, method_name):
        """system.methodHelp('add') => "Adds two integers together"

        Returns a string containing documentation for the specified method."""
        method = None
        if self.funcs.has_key(method_name):
            method = self.funcs[method_name]
        elif self.instance is not None:
            # Instance can implement _methodHelp to return help for a method
            if hasattr(self.instance, '_methodHelp'):
                return self.instance._methodHelp(method_name)
            # if the instance has a _dispatch method then we
            # don't have enough information to provide help
            elif not hasattr(self.instance, '_dispatch'):
                try:
                    method = resolve_dotted_attribute(
                                self.instance,
                                method_name,
                                self.allow_dotted_names
                                )
                except AttributeError:
                    pass

        # Note that we aren't checking that the method actually
        # be a callable object of some kind
        if method is None:
            return ""
        else:
            try:
                import pydoc
            except ImportError:
                return "" # not there for jython
            else:
                return pydoc.getdoc(method)

    def system_multicall(self, call_list):
        """system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => \
[[4], ...]

        Allows the caller to package multiple XML-RPC calls into a single
        request.

        See http://www.xmlrpc.com/discuss/msgReader$1208
        """
        results = []
        for call in call_list:
            method_name = call['methodName']
            params = call['params']

            try:
                # XXX A marshalling error in any response will fail the entire
                # multicall. If someone cares they should fix this.
                results.append([self._dispatch(method_name, params)])
            except Fault, fault:
                results.append(
                    {'faultCode' : fault.faultCode,
                     'faultString' : fault.faultString}
                    )
            except:
                results.append(
                    {'faultCode' : 1,
                     'faultString' : "%s:%s" % (sys.exc_type, sys.exc_value)} #@UndefinedVariable exc_value only available when we actually have an exception
                    )
        return results

    def _dispatch(self, method, params):
        """Dispatches the XML-RPC method.

        XML-RPC calls are forwarded to a registered function that
        matches the called XML-RPC method name. If no such function
        exists then the call is forwarded to the registered instance,
        if available.

        If the registered instance has a _dispatch method then that
        method will be called with the name of the XML-RPC method and
        its parameters as a tuple
        e.g. instance._dispatch('add',(2,3))

        If the registered instance does not have a _dispatch method
        then the instance will be searched to find a matching method
        and, if found, will be called.

        Methods beginning with an '_' are considered private and will
        not be called.
        """
        func = None
        try:
            # check to see if a matching function has been registered
            func = self.funcs[method]
        except KeyError:
            if self.instance is not None:
                # check for a _dispatch method
                if hasattr(self.instance, '_dispatch'):
                    return self.instance._dispatch(method, params)
                else:
                    # call instance method directly
                    try:
                        func = resolve_dotted_attribute(
                            self.instance,
                            method,
                            self.allow_dotted_names
                            )
                    except AttributeError:
                        pass

        if func is not None:
            return func(*params)
        else:
            raise Exception('method "%s" is not supported' % method)
class SimpleXMLRPCRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    """Simple XML-RPC request handler class.

    Handles all HTTP POST requests and attempts to decode them as
    XML-RPC requests.
    """

    # Class attribute listing the accessible path components;
    # paths not on this list will result in a 404 error.
    rpc_paths = ('/', '/RPC2')

    def is_rpc_path_valid(self):
        # True when self.path is one of the configured RPC endpoints.
        if self.rpc_paths:
            return self.path in self.rpc_paths
        else:
            # If .rpc_paths is empty, just assume all paths are legal
            return True

    def do_POST(self):
        """Handles the HTTP POST request.

        Attempts to interpret all HTTP POST requests as XML-RPC calls,
        which are forwarded to the server's _dispatch method for handling.
        """
        # Check that the path is legal
        if not self.is_rpc_path_valid():
            self.report_404()
            return

        try:
            # Get arguments by reading body of request.
            # We read this in chunks to avoid straining
            # socket.read(); around the 10 or 15Mb mark, some platforms
            # begin to have problems (bug #792570).
            max_chunk_size = 10 * 1024 * 1024
            size_remaining = int(self.headers["content-length"])
            L = []
            while size_remaining:
                chunk_size = min(size_remaining, max_chunk_size)
                L.append(self.rfile.read(chunk_size))
                size_remaining -= len(L[-1])
            data = ''.join(L)

            # In previous versions of SimpleXMLRPCServer, _dispatch
            # could be overridden in this class, instead of in
            # SimpleXMLRPCDispatcher. To maintain backwards compatibility,
            # check to see if a subclass implements _dispatch and dispatch
            # using that method if present.
            response = self.server._marshaled_dispatch(
                    data, getattr(self, '_dispatch', None)
                )
        except: # This should only happen if the module is buggy
            # internal error, report as HTTP server error
            self.send_response(500)
            self.end_headers()
        else:
            # got a valid XML RPC response
            self.send_response(200)
            self.send_header("Content-type", "text/xml")
            self.send_header("Content-length", str(len(response)))
            self.end_headers()
            self.wfile.write(response)

            # shut down the connection
            self.wfile.flush()
            self.connection.shutdown(1)

    def report_404(self):
        # Report a 404 error with a minimal plain-text body.
        self.send_response(404)
        response = 'No such page'
        self.send_header("Content-type", "text/plain")
        self.send_header("Content-length", str(len(response)))
        self.end_headers()
        self.wfile.write(response)
        # shut down the connection
        self.wfile.flush()
        self.connection.shutdown(1)

    def log_request(self, code='-', size='-'):
        """Selectively log an accepted request."""
        # Honour the owning server's logRequests flag before delegating.
        if self.server.logRequests:
            BaseHTTPServer.BaseHTTPRequestHandler.log_request(self, code, size)
class SimpleXMLRPCServer(SocketServer.TCPServer,
                         SimpleXMLRPCDispatcher):
    """Simple XML-RPC server.

    Simple XML-RPC server that allows functions and a single instance
    to be installed to handle requests. The default implementation
    attempts to dispatch XML-RPC calls to the functions or instance
    installed in the server. Override the _dispatch method inherited
    from SimpleXMLRPCDispatcher to change this behavior.
    """

    # Allow quick restarts on the same port after a previous run.
    allow_reuse_address = True

    def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler,
                 logRequests=True, allow_none=False, encoding=None):
        self.logRequests = logRequests

        SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding)
        SocketServer.TCPServer.__init__(self, addr, requestHandler)

        # [Bug #1222790] If possible, set close-on-exec flag; if a
        # method spawns a subprocess, the subprocess shouldn't have
        # the listening socket open.
        if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'):
            flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD)
            flags |= fcntl.FD_CLOEXEC
            fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags)
class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher):
    """Simple handler for XML-RPC data passed through CGI."""

    def __init__(self, allow_none=False, encoding=None):
        SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding)

    def handle_xmlrpc(self, request_text):
        """Handle a single XML-RPC request"""
        # Dispatch, then emit the CGI response (headers + XML body)
        # directly on stdout.
        response = self._marshaled_dispatch(request_text)

        sys.stdout.write('Content-Type: text/xml\n')
        sys.stdout.write('Content-Length: %d\n' % len(response))
        sys.stdout.write('\n')
        sys.stdout.write(response)

    def handle_get(self):
        """Handle a single HTTP GET request.

        Default implementation indicates an error because
        XML-RPC uses the POST method.
        """
        code = 400
        message, explain = \
                 BaseHTTPServer.BaseHTTPRequestHandler.responses[code]

        response = BaseHTTPServer.DEFAULT_ERROR_MESSAGE % { #@UndefinedVariable
            'code' : code,
            'message' : message,
            'explain' : explain
            }
        sys.stdout.write('Status: %d %s\n' % (code, message))
        sys.stdout.write('Content-Type: text/html\n')
        sys.stdout.write('Content-Length: %d\n' % len(response))
        sys.stdout.write('\n')
        sys.stdout.write(response)

    def handle_request(self, request_text=None):
        """Handle a single XML-RPC request passed through a CGI post method.

        If no XML data is given then it is read from stdin. The resulting
        XML-RPC response is printed to stdout along with the correct HTTP
        headers.
        """
        if request_text is None and \
            os.environ.get('REQUEST_METHOD', None) == 'GET':
            self.handle_get()
        else:
            # POST data is normally available through stdin
            if request_text is None:
                request_text = sys.stdin.read()

            self.handle_xmlrpc(request_text)
# Demo: expose pow() and an 'add' lambda on localhost:8000.
if __name__ == '__main__':
    sys.stdout.write('Running XML-RPC server on port 8000\n')
    server = SimpleXMLRPCServer(("localhost", 8000))
    server.register_function(pow)
    server.register_function(lambda x, y: x + y, 'add')
    server.serve_forever()

View file

@ -1,715 +0,0 @@
"""Generic socket server classes.
This module tries to capture the various aspects of defining a server:
For socket-based servers:
- address family:
- AF_INET{,6}: IP (Internet Protocol) sockets (default)
- AF_UNIX: Unix domain sockets
- others, e.g. AF_DECNET are conceivable (see <socket.h>
- socket type:
- SOCK_STREAM (reliable stream, e.g. TCP)
- SOCK_DGRAM (datagrams, e.g. UDP)
For request-based servers (including socket-based):
- client address verification before further looking at the request
(This is actually a hook for any processing that needs to look
at the request before anything else, e.g. logging)
- how to handle multiple requests:
- synchronous (one request is handled at a time)
- forking (each request is handled by a new process)
- threading (each request is handled by a new thread)
The classes in this module favor the server type that is simplest to
write: a synchronous TCP/IP server. This is bad class design, but
save some typing. (There's also the issue that a deep class hierarchy
slows down method lookups.)
There are five classes in an inheritance diagram, four of which represent
synchronous servers of four types:
+------------+
| BaseServer |
+------------+
|
v
+-----------+ +------------------+
| TCPServer |------->| UnixStreamServer |
+-----------+ +------------------+
|
v
+-----------+ +--------------------+
| UDPServer |------->| UnixDatagramServer |
+-----------+ +--------------------+
Note that UnixDatagramServer derives from UDPServer, not from
UnixStreamServer -- the only difference between an IP and a Unix
stream server is the address family, which is simply repeated in both
unix server classes.
Forking and threading versions of each type of server can be created
using the ForkingMixIn and ThreadingMixIn mix-in classes. For
instance, a threading UDP server class is created as follows:
class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
The Mix-in class must come first, since it overrides a method defined
in UDPServer! Setting the various member variables also changes
the behavior of the underlying server mechanism.
To implement a service, you must derive a class from
BaseRequestHandler and redefine its handle() method. You can then run
various versions of the service by combining one of the server classes
with your request handler class.
The request handler class must be different for datagram or stream
services. This can be hidden by using the request handler
subclasses StreamRequestHandler or DatagramRequestHandler.
Of course, you still have to use your head!
For instance, it makes no sense to use a forking server if the service
contains state in memory that can be modified by requests (since the
modifications in the child process would never reach the initial state
kept in the parent process and passed to each child). In this case,
you can use a threading server, but you will probably have to use
locks to avoid two requests that come in nearly simultaneous to apply
conflicting changes to the server state.
On the other hand, if you are building e.g. an HTTP server, where all
data is stored externally (e.g. in the file system), a synchronous
class will essentially render the service "deaf" while one request is
being handled -- which may be for a very long time if a client is slow
to read all the data it has requested. Here a threading or forking
server is appropriate.
In some cases, it may be appropriate to process part of a request
synchronously, but to finish processing in a forked child depending on
the request data. This can be implemented by using a synchronous
server and doing an explicit fork in the request handler class
handle() method.
Another approach to handling multiple simultaneous requests in an
environment that supports neither threads nor fork (or where these are
too expensive or inappropriate for the service) is to maintain an
explicit table of partially finished requests and to use select() to
decide which request to work on next (or whether to handle a new
incoming request). This is particularly important for stream services
where each client can potentially be connected for a long time (if
threads or subprocesses cannot be used).
Future work:
- Standard classes for Sun RPC (which uses either UDP or TCP)
- Standard mix-in classes to implement various authentication
and encryption schemes
- Standard framework for select-based multiplexing
XXX Open problems:
- What to do with out-of-band data?
BaseServer:
- split generic "request" functionality out into BaseServer class.
Copyright (C) 2000 Luke Kenneth Casson Leighton <lkcl@samba.org>
example: read entries from a SQL database (requires overriding
get_request() to return a table entry from the database).
entry is processed by a RequestHandlerClass.
"""
# Author of the BaseServer patch: Luke Kenneth Casson Leighton
# XXX Warning!
# There is a test suite for this module, but it cannot be run by the
# standard regression test.
# To run it manually, run Lib/test/test_socketserver.py.
__version__ = "0.4"
from _pydev_imps._pydev_saved_modules import socket
from _pydev_imps._pydev_saved_modules import select
import sys
import os
try:
from _pydev_imps._pydev_saved_modules import threading
except ImportError:
import dummy_threading as threading
__all__ = ["TCPServer","UDPServer","ForkingUDPServer","ForkingTCPServer",
"ThreadingUDPServer","ThreadingTCPServer","BaseRequestHandler",
"StreamRequestHandler","DatagramRequestHandler",
"ThreadingMixIn", "ForkingMixIn"]
if hasattr(socket, "AF_UNIX"):
__all__.extend(["UnixStreamServer","UnixDatagramServer",
"ThreadingUnixStreamServer",
"ThreadingUnixDatagramServer"])
class BaseServer:
    """Base class for server classes.
    Methods for the caller:
    - __init__(server_address, RequestHandlerClass)
    - serve_forever(poll_interval=0.5)
    - shutdown()
    - handle_request() # if you do not use serve_forever()
    - fileno() -> int # for select()
    Methods that may be overridden:
    - server_bind()
    - server_activate()
    - get_request() -> request, client_address
    - handle_timeout()
    - verify_request(request, client_address)
    - server_close()
    - process_request(request, client_address)
    - shutdown_request(request)
    - close_request(request)
    - handle_error()
    Methods for derived classes:
    - finish_request(request, client_address)
    Class variables that may be overridden by derived classes or
    instances:
    - timeout
    - address_family
    - socket_type
    - allow_reuse_address
    Instance variables:
    - RequestHandlerClass
    - socket
    """
    # Seconds handle_request() waits for activity before calling
    # handle_timeout(); None means wait forever.
    timeout = None
    def __init__(self, server_address, RequestHandlerClass):
        """Constructor. May be extended, do not override."""
        self.server_address = server_address
        self.RequestHandlerClass = RequestHandlerClass
        # Event that shutdown() waits on; set when serve_forever() exits.
        self.__is_shut_down = threading.Event() # @UndefinedVariable
        # Polled by serve_forever(); set by shutdown() to request exit.
        self.__shutdown_request = False
    def server_activate(self):
        """Called by constructor to activate the server.
        May be overridden.
        """
        # NOTE(review): BaseServer.__init__ itself never calls this; the
        # TCPServer subclass does (when bind_and_activate is true).
        pass
    def serve_forever(self, poll_interval=0.5):
        """Handle one request at a time until shutdown.
        Polls for shutdown every poll_interval seconds. Ignores
        self.timeout. If you need to do periodic tasks, do them in
        another thread.
        """
        self.__is_shut_down.clear()
        try:
            while not self.__shutdown_request:
                # XXX: Consider using another file descriptor or
                # connecting to the socket to wake this up instead of
                # polling. Polling reduces our responsiveness to a
                # shutdown request and wastes cpu at all other times.
                r, w, e = select.select([self], [], [], poll_interval)
                if self in r:
                    self._handle_request_noblock()
        finally:
            self.__shutdown_request = False
            # Unblock any thread waiting inside shutdown().
            self.__is_shut_down.set()
    def shutdown(self):
        """Stops the serve_forever loop.
        Blocks until the loop has finished. This must be called while
        serve_forever() is running in another thread, or it will
        deadlock.
        """
        self.__shutdown_request = True
        self.__is_shut_down.wait()
    # The distinction between handling, getting, processing and
    # finishing a request is fairly arbitrary. Remember:
    #
    # - handle_request() is the top-level call. It calls
    # select, get_request(), verify_request() and process_request()
    # - get_request() is different for stream or datagram sockets
    # - process_request() is the place that may fork a new process
    # or create a new thread to finish the request
    # - finish_request() instantiates the request handler class;
    # this constructor will handle the request all by itself
    def handle_request(self):
        """Handle one request, possibly blocking.
        Respects self.timeout.
        """
        # Support people who used socket.settimeout() to escape
        # handle_request before self.timeout was available.
        timeout = self.socket.gettimeout()
        if timeout is None:
            timeout = self.timeout
        elif self.timeout is not None:
            # Use the stricter of the socket timeout and self.timeout.
            timeout = min(timeout, self.timeout)
        fd_sets = select.select([self], [], [], timeout)
        if not fd_sets[0]:
            self.handle_timeout()
            return
        self._handle_request_noblock()
    def _handle_request_noblock(self):
        """Handle one request, without blocking.
        I assume that select.select has returned that the socket is
        readable before this function was called, so there should be
        no risk of blocking in get_request().
        """
        try:
            request, client_address = self.get_request()
        except socket.error:
            # Accept failures are silently ignored; caller simply retries.
            return
        if self.verify_request(request, client_address):
            try:
                self.process_request(request, client_address)
            except:
                self.handle_error(request, client_address)
                self.shutdown_request(request)
    def handle_timeout(self):
        """Called if no new request arrives within self.timeout.
        Overridden by ForkingMixIn.
        """
        pass
    def verify_request(self, request, client_address):
        """Verify the request. May be overridden.
        Return True if we should proceed with this request.
        """
        return True
    def process_request(self, request, client_address):
        """Call finish_request.
        Overridden by ForkingMixIn and ThreadingMixIn.
        """
        self.finish_request(request, client_address)
        self.shutdown_request(request)
    def server_close(self):
        """Called to clean-up the server.
        May be overridden.
        """
        pass
    def finish_request(self, request, client_address):
        """Finish one request by instantiating RequestHandlerClass."""
        self.RequestHandlerClass(request, client_address, self)
    def shutdown_request(self, request):
        """Called to shutdown and close an individual request."""
        self.close_request(request)
    def close_request(self, request):
        """Called to clean up an individual request."""
        pass
    def handle_error(self, request, client_address):
        """Handle an error gracefully. May be overridden.
        The default is to print a traceback and continue.
        """
        # Python 2 print statements: the banner goes to stdout while the
        # traceback itself goes to stderr (see the XXX below).
        print '-'*40
        print 'Exception happened during processing of request from',
        print client_address
        import traceback
        traceback.print_exc() # XXX But this goes to stderr!
        print '-'*40
class TCPServer(BaseServer):
    """Synchronous TCP server (AF_INET / SOCK_STREAM by default).

    Extends BaseServer with a real listening socket.  Constructor:
    __init__(server_address, RequestHandlerClass, bind_and_activate=True);
    pass bind_and_activate=False to defer server_bind()/server_activate()
    (e.g. to adjust socket options first).

    Overridable class variables: timeout, address_family, socket_type,
    request_queue_size (stream sockets only), allow_reuse_address.
    Instance variables: server_address, RequestHandlerClass, socket.
    """

    address_family = socket.AF_INET
    socket_type = socket.SOCK_STREAM
    request_queue_size = 5
    allow_reuse_address = False

    def __init__(self, server_address, RequestHandlerClass, bind_and_activate=True):
        """Create the listening socket; bind and activate unless deferred."""
        BaseServer.__init__(self, server_address, RequestHandlerClass)
        self.socket = socket.socket(self.address_family, self.socket_type)
        if bind_and_activate:
            self.server_bind()
            self.server_activate()

    def server_bind(self):
        """Bind the socket, honouring allow_reuse_address. May be overridden."""
        if self.allow_reuse_address:
            self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.socket.bind(self.server_address)
        # Record the address actually bound (e.g. when port 0 was requested).
        self.server_address = self.socket.getsockname()

    def server_activate(self):
        """Start listening on the bound socket. May be overridden."""
        self.socket.listen(self.request_queue_size)

    def server_close(self):
        """Release the listening socket. May be overridden."""
        self.socket.close()

    def fileno(self):
        """Return the listening socket's file descriptor (for select())."""
        return self.socket.fileno()

    def get_request(self):
        """Accept one connection; returns (connected socket, client address)."""
        return self.socket.accept()

    def shutdown_request(self, request):
        """Shut down and close one accepted connection."""
        try:
            # Explicitly signal end-of-write to the peer: close() alone just
            # drops our reference and leaves the real close to GC.
            request.shutdown(socket.SHUT_WR)
        except socket.error:
            pass # some platforms may raise ENOTCONN here
        self.close_request(request)

    def close_request(self, request):
        """Close one accepted connection."""
        request.close()
class UDPServer(TCPServer):
    """Synchronous UDP server.

    The "request" handed to the handler is the pair (datagram, socket);
    there is no per-request connection to listen on, shut down or close.
    """

    allow_reuse_address = False
    socket_type = socket.SOCK_DGRAM

    # Largest datagram accepted in a single recvfrom() call.
    max_packet_size = 8192

    def get_request(self):
        """Receive one datagram; return ((data, socket), client_address)."""
        payload, sender = self.socket.recvfrom(self.max_packet_size)
        return (payload, self.socket), sender

    def server_activate(self):
        """Datagram sockets need no listen() call."""
        pass

    def shutdown_request(self, request):
        """Nothing to shut down for UDP; just delegate to close_request."""
        self.close_request(request)

    def close_request(self, request):
        """Nothing to close for UDP."""
        pass
class ForkingMixIn:
    """Mix-in class to handle each request in a new process."""
    # Inactivity timeout (seconds) after which zombies are reaped.
    timeout = 300
    # List of live child pids; created lazily on the first fork.
    active_children = None
    # Upper bound on concurrent children; collect_children() blocks
    # until we drop below it.
    max_children = 40
    def collect_children(self):
        """Internal routine to wait for children that have exited."""
        if self.active_children is None: return
        # Over the limit: block until some child (any child) exits.
        while len(self.active_children) >= self.max_children:
            # XXX: This will wait for any child process, not just ones
            # spawned by this library. This could confuse other
            # libraries that expect to be able to wait for their own
            # children.
            try:
                pid, status = os.waitpid(0, 0)
            except os.error:
                pid = None
            if pid not in self.active_children: continue
            self.active_children.remove(pid)
        # XXX: This loop runs more system calls than it ought
        # to. There should be a way to put the active_children into a
        # process group and then use os.waitpid(-pgid) to wait for any
        # of that set, but I couldn't find a way to allocate pgids
        # that couldn't collide.
        # Non-blocking reap of each known child (WNOHANG).
        for child in self.active_children:
            try:
                pid, status = os.waitpid(child, os.WNOHANG) # @UndefinedVariable
            except os.error:
                pid = None
            if not pid: continue
            try:
                self.active_children.remove(pid)
            except ValueError, e:
                # Python 2 except/e.message syntax; re-raise with context.
                raise ValueError('%s. x=%d and list=%r' % (e.message, pid,
                                                           self.active_children))
    def handle_timeout(self):
        """Wait for zombies after self.timeout seconds of inactivity.
        May be extended, do not override.
        """
        self.collect_children()
    def process_request(self, request, client_address):
        """Fork a new subprocess to process the request."""
        self.collect_children()
        pid = os.fork() # @UndefinedVariable
        if pid:
            # Parent process
            if self.active_children is None:
                self.active_children = []
            self.active_children.append(pid)
            self.close_request(request) #close handle in parent process
            return
        else:
            # Child process.
            # This must never return, hence os._exit()!
            try:
                self.finish_request(request, client_address)
                self.shutdown_request(request)
                os._exit(0)
            except:
                try:
                    self.handle_error(request, client_address)
                    self.shutdown_request(request)
                finally:
                    os._exit(1)
class ThreadingMixIn:
    """Mix-in class that services each request on its own thread."""

    # Whether worker threads are daemonic (die with the main process).
    daemon_threads = False

    def process_request_thread(self, request, client_address):
        """Thread target: finish the request, then shut it down.

        Mirrors BaseServer.process_request but reports failures through
        handle_error instead of letting them escape the thread.
        """
        try:
            self.finish_request(request, client_address)
            self.shutdown_request(request)
        except:
            self.handle_error(request, client_address)
            self.shutdown_request(request)

    def process_request(self, request, client_address):
        """Spawn a worker thread to process this request."""
        worker = threading.Thread(
            target=self.process_request_thread,
            args=(request, client_address))
        worker.daemon = self.daemon_threads
        worker.start()
# Concrete servers pairing a concurrency mix-in with a transport class.
# The mix-in must come first so its process_request() wins in the MRO.
class ForkingUDPServer(ForkingMixIn, UDPServer): pass
class ForkingTCPServer(ForkingMixIn, TCPServer): pass
class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
class ThreadingTCPServer(ThreadingMixIn, TCPServer): pass
# Unix-domain variants differ from the IP servers only in address family;
# they are defined only where the platform exposes AF_UNIX.
if hasattr(socket, 'AF_UNIX'):
    class UnixStreamServer(TCPServer):
        address_family = socket.AF_UNIX # @UndefinedVariable
    class UnixDatagramServer(UDPServer):
        address_family = socket.AF_UNIX # @UndefinedVariable
    class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass
    class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass
class BaseRequestHandler:
    """Per-request handler base class.

    One instance is created for each request; the constructor stores the
    request, the client address and the owning server, then runs the
    setup() -> handle() -> finish() sequence.  Subclasses implement a
    service by overriding handle() (and optionally setup()/finish());
    the handler can reach self.request, self.client_address and
    self.server, and each request gets its own instance.
    """

    def __init__(self, request, client_address, server):
        self.request = request
        self.client_address = client_address
        self.server = server
        self.setup()
        try:
            self.handle()
        finally:
            # finish() always runs, even when handle() raises.
            self.finish()

    def setup(self):
        """Hook run before handle(); default does nothing."""
        pass

    def handle(self):
        """Service the request; default does nothing."""
        pass

    def finish(self):
        """Hook run after handle(), even on error; default does nothing."""
        pass
# The following two classes make it possible to use the same service
# class for stream or datagram servers.
# Each class sets up these instance variables:
# - rfile: a file object from which receives the request is read
# - wfile: a file object to which the reply is written
# When the handle() method returns, wfile is flushed properly
class StreamRequestHandler(BaseRequestHandler):
    """Request handler exposing a stream connection as rfile/wfile."""

    # rfile is buffered (otherwise large reads cost a call per byte);
    # wfile is unbuffered so (a) replies go out as soon as written and
    # (b) big writes are still handled efficiently by stdio.
    rbufsize = -1
    wbufsize = 0

    # Optional per-connection socket timeout in seconds; None disables it.
    timeout = None

    # Set True to disable Nagle's algorithm on the connection.
    # Use only when wbufsize != 0, to avoid small packets.
    disable_nagle_algorithm = False

    def setup(self):
        self.connection = self.request
        if self.timeout is not None:
            self.connection.settimeout(self.timeout)
        if self.disable_nagle_algorithm:
            self.connection.setsockopt(socket.IPPROTO_TCP,
                                       socket.TCP_NODELAY, True)
        self.rfile = self.connection.makefile('rb', self.rbufsize)
        self.wfile = self.connection.makefile('wb', self.wbufsize)

    def finish(self):
        # Flush any buffered output before closing, unless already closed.
        if not self.wfile.closed:
            self.wfile.flush()
        self.wfile.close()
        self.rfile.close()
class DatagramRequestHandler(BaseRequestHandler):
    # XXX Regrettably, I cannot get this working on Linux;
    # s.recvfrom() doesn't return a meaningful client address.
    """Define self.rfile and self.wfile for datagram sockets."""
    def setup(self):
        # Python 2: prefer the C-accelerated cStringIO, falling back to
        # the pure-Python StringIO where it is unavailable (e.g. Jython).
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        # The UDP "request" is the (datagram, socket) pair produced by
        # UDPServer.get_request().
        self.packet, self.socket = self.request
        self.rfile = StringIO(self.packet)
        self.wfile = StringIO()
    def finish(self):
        # Send everything the handler wrote to wfile as one reply datagram.
        self.socket.sendto(self.wfile.getvalue(), self.client_address)

View file

@ -1,788 +0,0 @@
"""Get useful information from live Python objects.
This module encapsulates the interface provided by the internal special
attributes (func_*, co_*, im_*, tb_*, etc.) in a friendlier fashion.
It also provides some help for examining source code and class layout.
Here are some of the useful functions provided by this module:
ismodule(), isclass(), ismethod(), isfunction(), istraceback(),
isframe(), iscode(), isbuiltin(), isroutine() - check object types
getmembers() - get members of an object that satisfy a given condition
getfile(), getsourcefile(), getsource() - find an object's source code
getdoc(), getcomments() - get documentation on an object
getmodule() - determine the module that an object came from
getclasstree() - arrange classes so as to represent their hierarchy
getargspec(), getargvalues() - get info about function arguments
formatargspec(), formatargvalues() - format an argument spec
getouterframes(), getinnerframes() - get info about frames
currentframe() - get the current stack frame
stack(), trace() - get info about frames on the stack or in a traceback
"""
# This module is in the public domain. No warranties.
__author__ = 'Ka-Ping Yee <ping@lfw.org>'
__date__ = '1 Jan 2001'
import sys, os, types, string, re, imp, tokenize
# ----------------------------------------------------------- type-checking
def ismodule(object):
    """Return true if *object* is a module.

    Modules expose __doc__ and, for non-builtins, __file__."""
    return isinstance(object, types.ModuleType)
def isclass(object):
    """Return true if the object is a class.
    Class objects provide these attributes:
        __doc__         documentation string
        __module__      name of module in which this class was defined"""
    # types.ClassType (old-style classes) is Python 2 only; the hasattr
    # check additionally catches new-style classes via __bases__.
    return isinstance(object, types.ClassType) or hasattr(object, '__bases__')
def ismethod(object):
    """Return true if *object* is an instance method (types.MethodType).

    On Python 2, instance methods additionally carry im_class, im_func
    and im_self (im_self is None for unbound methods)."""
    return isinstance(object, types.MethodType)
def ismethoddescriptor(object):
    """Return true if *object* is a method descriptor, e.g. int.__add__.

    Anything already passing ismethod(), isclass() or isfunction() is
    excluded: those tests promise a richer interface (im_func etc.),
    so this one deliberately reports only the bare-descriptor case --
    an object with a __get__ attribute but no __set__ (which would make
    it a data descriptor instead)."""
    if ismethod(object) or isclass(object) or isfunction(object):
        return False
    return hasattr(object, "__get__") and not hasattr(object, "__set__")
def isfunction(object):
    """Return true if *object* is a user-defined function or lambda
    (types.FunctionType; the func_code/func_defaults/... attributes
    listed in the original docstring are the Python 2 spellings)."""
    return isinstance(object, types.FunctionType)
def istraceback(object):
    """Return true if *object* is a traceback object
    (types.TracebackType: tb_frame, tb_lasti, tb_lineno, tb_next)."""
    return isinstance(object, types.TracebackType)
def isframe(object):
    """Return true if *object* is an interpreter stack frame
    (types.FrameType: f_back, f_code, f_globals, f_locals, f_lineno...)."""
    return isinstance(object, types.FrameType)
def iscode(object):
    """Return true if *object* is a compiled code object
    (types.CodeType: co_code, co_filename, co_firstlineno, co_name...)."""
    return isinstance(object, types.CodeType)
def isbuiltin(object):
    """Return true if *object* is a built-in function or method
    (implemented in C; exposes __doc__, __name__ and __self__)."""
    return isinstance(object, types.BuiltinFunctionType)
def isroutine(object):
    """Return true for any kind of function or method: user-defined
    functions, bound/unbound methods, builtins and method descriptors."""
    return (isfunction(object)
            or ismethod(object)
            or isbuiltin(object)
            or ismethoddescriptor(object))
def getmembers(object, predicate=None):
    """Return (name, value) pairs for *object*'s attributes, sorted by name.

    When *predicate* is supplied (and truthy), only members for which
    predicate(value) is true are kept."""
    pairs = []
    for name in dir(object):
        value = getattr(object, name)
        if not predicate or predicate(value):
            pairs.append((name, value))
    pairs.sort()
    return pairs
def classify_class_attrs(cls):
    """Return list of attribute-descriptor tuples.
    For each name in dir(cls), the return list contains a 4-tuple
    with these elements:
    0. The name (a string).
    1. The kind of attribute this is, one of these strings:
           'class method'    created via classmethod()
           'static method'   created via staticmethod()
           'property'        created via property()
           'method'          any other flavor of method
           'data'            not a method
    2. The class which defined this attribute (a class).
    3. The object as obtained directly from the defining class's
       __dict__, not via getattr. This is especially important for
       data attributes: C.data is just a data object, but
       C.__dict__['data'] may be a data descriptor with additional
       info, like a __doc__ string.
    """
    mro = getmro(cls)
    names = dir(cls)
    result = []
    for name in names:
        # Get the object associated with the name.
        # Getting an obj from the __dict__ sometimes reveals more than
        # using getattr. Static and class methods are dramatic examples.
        if name in cls.__dict__:
            obj = cls.__dict__[name]
        else:
            obj = getattr(cls, name)
        # Figure out where it was defined.
        homecls = getattr(obj, "__objclass__", None)
        if homecls is None:
            # search the dicts.
            for base in mro:
                if name in base.__dict__:
                    homecls = base
                    break
        # Get the object again, in order to get it from the defining
        # __dict__ instead of via getattr (if possible).
        if homecls is not None and name in homecls.__dict__:
            obj = homecls.__dict__[name]
        # Also get the object via getattr.
        obj_via_getattr = getattr(cls, name)
        # Classify the object. NOTE: order matters -- the raw __dict__
        # entry distinguishes static/class methods and properties that
        # would look like plain attributes through getattr.
        if isinstance(obj, staticmethod):
            kind = "static method"
        elif isinstance(obj, classmethod):
            kind = "class method"
        elif isinstance(obj, property):
            kind = "property"
        elif (ismethod(obj_via_getattr) or
              ismethoddescriptor(obj_via_getattr)):
            kind = "method"
        else:
            kind = "data"
        result.append((name, kind, homecls, obj))
    return result
# ----------------------------------------------------------- class helpers
def _searchbases(cls, accum):
# Simulate the "classic class" search order.
if cls in accum:
return
accum.append(cls)
for base in cls.__bases__:
_searchbases(base, accum)
def getmro(cls):
    """Return tuple of base classes (including *cls*) in method
    resolution order.

    New-style classes expose __mro__ directly; classic (Python 2)
    classes fall back to a depth-first search of __bases__."""
    mro = getattr(cls, "__mro__", None)
    if mro is not None:
        return mro
    collected = []
    _searchbases(cls, collected)
    return tuple(collected)
# -------------------------------------------------- source code extraction
def indentsize(line):
    """Return the indent size, in spaces, at the start of a line of text."""
    # Python 2 string-module functions; equivalent to
    # len(line.expandtabs()) - len(line.expandtabs().lstrip()).
    expline = string.expandtabs(line)
    return len(expline) - len(string.lstrip(expline))
def getdoc(object):
    """Get the documentation string for an object.
    All tabs are expanded to spaces. To clean up docstrings that are
    indented to line up with blocks of code, any whitespace than can be
    uniformly removed from the second line onwards is removed."""
    try:
        doc = object.__doc__
    except AttributeError:
        return None
    # NOTE: 'unicode' is a Python 2 builtin; this module predates Python 3.
    if not isinstance(doc, (str, unicode)):
        return None
    try:
        lines = string.split(string.expandtabs(doc), '\n')
    except UnicodeError:
        return None
    else:
        # Find the smallest indentation shared by all non-blank lines
        # after the first, then strip that margin from those lines.
        margin = None
        for line in lines[1:]:
            content = len(string.lstrip(line))
            if not content: continue
            indent = len(line) - content
            if margin is None: margin = indent
            else: margin = min(margin, indent)
        if margin is not None:
            for i in range(1, len(lines)): lines[i] = lines[i][margin:]
        return string.join(lines, '\n')
def getfile(object):
    """Work out which source or compiled file an object was defined in."""
    if ismodule(object):
        if hasattr(object, '__file__'):
            return object.__file__
        # Python 2 'raise Type, msg' syntax throughout this function.
        raise TypeError, 'arg is a built-in module'
    if isclass(object):
        object = sys.modules.get(object.__module__)
        if hasattr(object, '__file__'):
            return object.__file__
        raise TypeError, 'arg is a built-in class'
    # Unwrap stepwise: method -> function -> code; traceback/frame -> code.
    if ismethod(object):
        object = object.im_func
    if isfunction(object):
        object = object.func_code
    if istraceback(object):
        object = object.tb_frame
    if isframe(object):
        object = object.f_code
    if iscode(object):
        return object.co_filename
    raise TypeError, 'arg is not a module, class, method, ' \
                     'function, traceback, frame, or code object'
def getmoduleinfo(path):
    """Get the module name, suffix, mode, and module type for a given file."""
    filename = os.path.basename(path)
    # Python 2 tuple-parameter lambda; prefixing with the negated suffix
    # length makes the sort try longer suffixes first.
    suffixes = map(lambda (suffix, mode, mtype):
                   (-len(suffix), suffix, mode, mtype), imp.get_suffixes())
    suffixes.sort() # try longest suffixes first, in case they overlap
    for neglen, suffix, mode, mtype in suffixes:
        if filename[neglen:] == suffix:
            return filename[:neglen], suffix, mode, mtype
    # Implicitly returns None when no known suffix matches.
def getmodulename(path):
    """Return the module name for a given file, or None when the file
    has no recognized module suffix."""
    info = getmoduleinfo(path)
    return info[0] if info else None
def getsourcefile(object):
    """Return the Python source file an object was defined in, if it exists."""
    filename = getfile(object)
    # Map compiled extensions (.pyc/.pyo) back to the .py source name.
    if string.lower(filename[-4:]) in ['.pyc', '.pyo']:
        filename = filename[:-4] + '.py'
    for suffix, mode, kind in imp.get_suffixes():
        if 'b' in mode and string.lower(filename[-len(suffix):]) == suffix:
            # Looks like a binary file. We want to only return a text file.
            return None
    if os.path.exists(filename):
        return filename
    # Implicitly returns None when no source file exists on disk.
def getabsfile(object):
    """Return a normalized, absolute path to the object's source or
    compiled file.

    Normalizing case and making the path absolute gives each object a
    single canonical origin, so paths can be compared directly."""
    path = getsourcefile(object) or getfile(object)
    return os.path.normcase(os.path.abspath(path))
# Cache mapping absolute file path -> module name, filled lazily by getmodule().
modulesbyfile = {}
def getmodule(object):
    """Return the module an object was defined in, or None if not found."""
    if ismodule(object):
        return object
    if isclass(object):
        return sys.modules.get(object.__module__)
    try:
        file = getabsfile(object)
    except TypeError:
        return None
    # dict.has_key() is Python 2 only.
    if modulesbyfile.has_key(file):
        return sys.modules[modulesbyfile[file]]
    # Cache miss: index every currently loaded module by file, then retry.
    for module in sys.modules.values():
        if hasattr(module, '__file__'):
            modulesbyfile[getabsfile(module)] = module.__name__
    if modulesbyfile.has_key(file):
        return sys.modules[modulesbyfile[file]]
    # Last resort: look the object up by name in __main__ and __builtin__
    # and confirm by identity.
    main = sys.modules['__main__']
    if hasattr(main, object.__name__):
        mainobject = getattr(main, object.__name__)
        if mainobject is object:
            return main
    builtin = sys.modules['__builtin__']
    if hasattr(builtin, object.__name__):
        builtinobject = getattr(builtin, object.__name__)
        if builtinobject is object:
            return builtin
def findsource(object):
    """Return the entire source file and starting line number for an object.
    The argument may be a module, class, method, function, traceback, frame,
    or code object. The source code is returned as a list of all the lines
    in the file and the line number indexes a line in that list. An IOError
    is raised if the source code cannot be retrieved."""
    try:
        file = open(getsourcefile(object))
    except (TypeError, IOError):
        # Python 2 'raise Type, msg' syntax throughout this function.
        raise IOError, 'could not get source code'
    lines = file.readlines()
    file.close()
    if ismodule(object):
        return lines, 0
    if isclass(object):
        # Scan the file for the 'class <name>' statement.
        name = object.__name__
        pat = re.compile(r'^\s*class\s*' + name + r'\b')
        for i in range(len(lines)):
            if pat.match(lines[i]): return lines, i
        else: raise IOError, 'could not find class definition'
    if ismethod(object):
        object = object.im_func
    if isfunction(object):
        object = object.func_code
    if istraceback(object):
        object = object.tb_frame
    if isframe(object):
        object = object.f_code
    if iscode(object):
        if not hasattr(object, 'co_firstlineno'):
            raise IOError, 'could not find function definition'
        # Walk backwards from the code object's first line to the
        # enclosing 'def' or lambda line.
        lnum = object.co_firstlineno - 1
        pat = re.compile(r'^(\s*def\s)|(.*\slambda(:|\s))')
        while lnum > 0:
            if pat.match(lines[lnum]): break
            lnum = lnum - 1
        return lines, lnum
    raise IOError, 'could not find code object'
def getcomments(object):
    """Get lines of comments immediately preceding an object's source code."""
    try: lines, lnum = findsource(object)
    except IOError: return None
    if ismodule(object):
        # Look for a comment block at the top of the file.
        start = 0
        if lines and lines[0][:2] == '#!': start = 1
        while start < len(lines) and string.strip(lines[start]) in ['', '#']:
            start = start + 1
        if start < len(lines) and lines[start][:1] == '#':
            comments = []
            end = start
            while end < len(lines) and lines[end][:1] == '#':
                comments.append(string.expandtabs(lines[end]))
                end = end + 1
            return string.join(comments, '')
    # Look for a preceding block of comments at the same indentation.
    elif lnum > 0:
        indent = indentsize(lines[lnum])
        end = lnum - 1
        if end >= 0 and string.lstrip(lines[end])[:1] == '#' and \
            indentsize(lines[end]) == indent:
            comments = [string.lstrip(string.expandtabs(lines[end]))]
            if end > 0:
                end = end - 1
                comment = string.lstrip(string.expandtabs(lines[end]))
                # Walk upwards collecting contiguous '#' lines at this indent.
                while comment[:1] == '#' and indentsize(lines[end]) == indent:
                    comments[:0] = [comment]
                    end = end - 1
                    if end < 0: break
                    comment = string.lstrip(string.expandtabs(lines[end]))
            # Trim bare '#' lines from both ends of the gathered block.
            while comments and string.strip(comments[0]) == '#':
                comments[:1] = []
            while comments and string.strip(comments[-1]) == '#':
                comments[-1:] = []
            return string.join(comments, '')
class ListReader:
    """Adapter exposing a file-like readline() over a list of strings.

    readline() yields successive items and returns '' once the list is
    exhausted, mirroring file-object behaviour (as tokenize expects)."""

    def __init__(self, lines):
        self.lines = lines
        self.index = 0

    def readline(self):
        if self.index >= len(self.lines):
            return ''
        line = self.lines[self.index]
        self.index += 1
        return line
class EndOfBlock(Exception):
    """Raised by BlockFinder.tokeneater to signal the end of a code block.

    args[0] carries the line number of the last logical line of the block.
    """
    pass


class BlockFinder:
    """Provide a tokeneater() method to detect the end of a code block.

    Feed tokens to tokeneater(); it raises EndOfBlock (carrying the last
    interesting line number) once the block starting at the first NAME
    token has been fully consumed.
    """

    def __init__(self):
        self.indent = 0   # current INDENT/DEDENT balance
        self.started = 0  # becomes 1 once the first NAME token is seen
        self.last = 0     # line number of the most recent NEWLINE

    def tokeneater(self, type, token, start, end, line):
        # Tuple parameters ``(srow, scol)`` were removed in Python 3;
        # unpack the token positions explicitly instead.
        srow, scol = start
        erow, ecol = end
        if not self.started:
            # The block is considered started at the first NAME token
            # (e.g. 'def' or 'class').
            if type == tokenize.NAME:
                self.started = 1
        elif type == tokenize.NEWLINE:
            self.last = srow
        elif type == tokenize.INDENT:
            self.indent = self.indent + 1
        elif type == tokenize.DEDENT:
            self.indent = self.indent - 1
            if self.indent == 0:
                # ``raise E, v`` syntax is gone in Python 3.
                raise EndOfBlock(self.last)
        elif type == tokenize.NAME and scol == 0:
            # A column-0 NAME after the block started means a new block began.
            raise EndOfBlock(self.last)
def getblock(lines):
    """Extract the block of code at the top of the given list of lines.

    Tokenizes the lines and uses BlockFinder to locate the end of the first
    block; returns the sub-list of lines that make up that block.
    """
    try:
        # NOTE(review): this callback-style tokenize.tokenize() call matches
        # the Python-2/Jython API this legacy module targets -- confirm
        # before using on modern CPython, where the signature differs.
        tokenize.tokenize(ListReader(lines).readline, BlockFinder().tokeneater)
    except EndOfBlock as eob:
        # Python 3 removed the ``except E, v`` syntax; use ``as``.
        return lines[:eob.args[0]]
    # Fooling the indent/dedent logic implies a one-line definition.
    return lines[:1]
def getsourcelines(object):
    """Return a list of source lines and starting line number for an object.

    The argument may be a module, class, method, function, traceback, frame,
    or code object.  The source code is returned as a list of the lines
    corresponding to the object, and the line number indicates where in the
    original source file the first line of code was found.  An IOError is
    raised if the source code cannot be retrieved.
    """
    lines, lnum = findsource(object)
    if ismodule(object):
        # A whole module: all lines, starting at line 0.
        return lines, 0
    # Otherwise carve out just the object's block; line numbers are 1-based.
    return getblock(lines[lnum:]), lnum + 1
def getsource(object):
    """Return the text of the source code for an object.

    The argument may be a module, class, method, function, traceback, frame,
    or code object.  The source code is returned as a single string.  An
    IOError is raised if the source code cannot be retrieved.
    """
    lines, lnum = getsourcelines(object)
    # string.join() was removed in Python 3; str.join works on 2 and 3.
    return ''.join(lines)
# --------------------------------------------------- class tree extraction
def walktree(classes, children, parent):
"""Recursive helper function for getclasstree()."""
results = []
classes.sort(lambda a, b: cmp(a.__name__, b.__name__))
for c in classes:
results.append((c, c.__bases__))
if children.has_key(c):
results.append(walktree(children[c], children, c))
return results
def getclasstree(classes, unique=0):
"""Arrange the given list of classes into a hierarchy of nested lists.
Where a nested list appears, it contains classes derived from the class
whose entry immediately precedes the list. Each entry is a 2-tuple
containing a class and a tuple of its base classes. If the 'unique'
argument is true, exactly one entry appears in the returned structure
for each class in the given list. Otherwise, classes using multiple
inheritance and their descendants will appear multiple times."""
children = {}
roots = []
for c in classes:
if c.__bases__:
for parent in c.__bases__:
if not children.has_key(parent):
children[parent] = []
children[parent].append(c)
if unique and parent in classes: break
elif c not in roots:
roots.append(c)
for parent in children.keys():
if parent not in classes:
roots.append(parent)
return walktree(roots, children, None)
# ------------------------------------------------ argument list extraction
# These constants are from Python's compile.h.
# Bit flags found in a code object's co_flags attribute.
CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS = 1, 2, 4, 8
def getargs(co):
    """Get information about the arguments accepted by a code object.

    Three things are returned: (args, varargs, varkw), where 'args' is
    a list of argument names (possibly containing nested lists), and
    'varargs' and 'varkw' are the names of the * and ** arguments or None.

    NOTE(review): legacy Python-2-era code -- the ``raise X, msg`` syntax
    and the ord()-based byte-string indexing below are not valid/correct on
    Python 3.
    """
    if not iscode(co): raise TypeError, 'arg is not a code object'

    nargs = co.co_argcount
    names = co.co_varnames
    args = list(names[:nargs])
    step = 0

    # The following acrobatics are for anonymous (tuple) arguments, a
    # Python-2-only construct such as ``def f((a, b)):``.  The compiler
    # names those arguments '' or '.N'; we decode the bytecode prologue
    # (UNPACK_TUPLE/UNPACK_SEQUENCE followed by STORE_FASTs) to rebuild
    # the nested argument structure.
    if not sys.platform.startswith('java'):#Jython doesn't have co_code
        code = co.co_code
        import dis
        for i in range(nargs):
            if args[i][:1] in ['', '.']:
                stack, remain, count = [], [], []
                while step < len(code):
                    op = ord(code[step])  # Py2: co_code items are 1-char strs
                    step = step + 1
                    if op >= dis.HAVE_ARGUMENT:
                        opname = dis.opname[op]
                        # Two-byte little-endian oparg (pre-3.6 bytecode).
                        value = ord(code[step]) + ord(code[step + 1]) * 256
                        step = step + 2
                        if opname in ['UNPACK_TUPLE', 'UNPACK_SEQUENCE']:
                            remain.append(value)
                            count.append(value)
                        elif opname == 'STORE_FAST':
                            stack.append(names[value])
                            remain[-1] = remain[-1] - 1
                            while remain[-1] == 0:
                                remain.pop()
                                size = count.pop()
                                # Collapse the last 'size' names into a
                                # nested list mirroring the tuple shape.
                                stack[-size:] = [stack[-size:]]
                                if not remain: break
                                remain[-1] = remain[-1] - 1
                            if not remain: break
                args[i] = stack[0]

    varargs = None
    if co.co_flags & CO_VARARGS:
        # The *args name follows the positionals in co_varnames.
        varargs = co.co_varnames[nargs]
        nargs = nargs + 1
    varkw = None
    if co.co_flags & CO_VARKEYWORDS:
        varkw = co.co_varnames[nargs]
    return args, varargs, varkw
def getargspec(func):
    """Get the names and default values of a function's arguments.

    A tuple of four things is returned: (args, varargs, varkw, defaults).
    'args' is a list of the argument names (it may contain nested lists).
    'varargs' and 'varkw' are the names of the * and ** arguments or None.
    'defaults' is an n-tuple of the default values of the last n arguments.
    """
    if ismethod(func):
        # __func__ is the modern spelling of the Python-2-only im_func.
        func = func.__func__
    if not isfunction(func):
        # ``raise E, msg`` syntax was removed in Python 3.
        raise TypeError('arg is not a Python function')
    # __code__/__defaults__ replace the removed func_code/func_defaults.
    args, varargs, varkw = getargs(func.__code__)
    return args, varargs, varkw, func.__defaults__
def getargvalues(frame):
    """Get information about arguments passed into a particular frame.

    A tuple of four things is returned: (args, varargs, varkw, locals).
    'args' is a list of the argument names (it may contain nested lists).
    'varargs' and 'varkw' are the names of the * and ** arguments or None.
    'locals' is the locals dictionary of the given frame.
    """
    arg_names, star_args, kw_args = getargs(frame.f_code)
    return arg_names, star_args, kw_args, frame.f_locals
def joinseq(seq):
    """Join already-stringified items as a parenthesized, comma-separated tuple.

    A single element gets a trailing comma, matching tuple repr: '(x,)'.
    """
    if len(seq) == 1:
        return '(' + seq[0] + ',)'
    # string.join() was removed in Python 3; str.join works on 2 and 3.
    return '(' + ', '.join(seq) + ')'


def strseq(object, convert, join=joinseq):
    """Recursively walk a sequence, stringifying each element.

    Nested lists/tuples are rendered via 'join'; leaves via 'convert'.
    """
    # types.ListType/TupleType were removed in Python 3; check the builtin
    # types directly.  A real list is built (not a lazy map) because 'join'
    # needs len() of its argument.
    if isinstance(object, (list, tuple)):
        return join([strseq(o, convert, join) for o in object])
    else:
        return convert(object)
def formatargspec(args, varargs=None, varkw=None, defaults=None,
                  formatarg=str,
                  formatvarargs=lambda name: '*' + name,
                  formatvarkw=lambda name: '**' + name,
                  formatvalue=lambda value: '=' + repr(value),
                  join=joinseq):
    """Format an argument spec from the 4 values returned by getargspec.

    The first four arguments are (args, varargs, varkw, defaults).  The
    other four arguments are the corresponding optional formatting functions
    that are called to turn names and values into strings.  The ninth
    argument is an optional function to format the sequence of arguments.
    """
    specs = []
    if defaults:
        # Defaults align with the *last* len(defaults) positional args.
        firstdefault = len(args) - len(defaults)
    for i in range(len(args)):
        spec = strseq(args[i], formatarg, join)
        if defaults and i >= firstdefault:
            spec = spec + formatvalue(defaults[i - firstdefault])
        specs.append(spec)
    if varargs:
        specs.append(formatvarargs(varargs))
    if varkw:
        specs.append(formatvarkw(varkw))
    # string.join() was removed in Python 3; str.join works on 2 and 3.
    return '(' + ', '.join(specs) + ')'
def formatargvalues(args, varargs, varkw, locals,
                    formatarg=str,
                    formatvarargs=lambda name: '*' + name,
                    formatvarkw=lambda name: '**' + name,
                    formatvalue=lambda value: '=' + repr(value),
                    join=joinseq):
    """Format an argument spec from the 4 values returned by getargvalues.

    The first four arguments are (args, varargs, varkw, locals).  The
    next four arguments are the corresponding optional formatting functions
    that are called to turn names and values into strings.  The ninth
    argument is an optional function to format the sequence of arguments.
    """
    def convert(name, locals=locals,
                formatarg=formatarg, formatvalue=formatvalue):
        # Render 'name=value' using the frame's locals dictionary.
        return formatarg(name) + formatvalue(locals[name])
    specs = []
    for i in range(len(args)):
        specs.append(strseq(args[i], convert, join))
    if varargs:
        specs.append(formatvarargs(varargs) + formatvalue(locals[varargs]))
    if varkw:
        specs.append(formatvarkw(varkw) + formatvalue(locals[varkw]))
    # string.join() was removed in Python 3; str.join works on 2 and 3.
    return '(' + ', '.join(specs) + ')'
# -------------------------------------------------- stack frame extraction
def getframeinfo(frame, context=1):
    """Get information about a frame or traceback object.

    In the upstream inspect module this returns a 5-tuple: the filename,
    the current line number, the function name, a list of 'context' source
    lines centered on the current line, and the index of the current line
    within that list.  This legacy copy does not implement it and always
    raises NotImplementedError.
    """
    raise NotImplementedError
def getlineno(frame):
    """Get the line number from a frame object, allowing for optimization."""
    # Written by Marc-André Lemburg; revised by Jim Hugunin and Fredrik Lundh.
    lineno = frame.f_lineno
    code = frame.f_code
    if hasattr(code, 'co_lnotab'):
        # Decode the line-number table: pairs of (bytecode-offset delta,
        # line delta), accumulated until we pass the frame's current
        # instruction (f_lasti).
        # NOTE(review): ord() over co_lnotab assumes Python-2 byte strings;
        # on Python 3, indexing bytes already yields ints and ord() fails.
        table = code.co_lnotab
        lineno = code.co_firstlineno
        addr = 0
        for i in range(0, len(table), 2):
            addr = addr + ord(table[i])
            if addr > frame.f_lasti: break
            lineno = lineno + ord(table[i + 1])
    return lineno
def getouterframes(frame, context=1):
    """Get a list of records for a frame and all higher (calling) frames.

    Each record contains a frame object, filename, line number, function
    name, a list of lines of context, and index within the context.
    """
    records = []
    current = frame
    # Walk up the call chain via f_back until the outermost frame.
    while current:
        records.append((current,) + getframeinfo(current, context))
        current = current.f_back
    return records
def getinnerframes(tb, context=1):
    """Get a list of records for a traceback's frame and all lower frames.

    Each record contains a frame object, filename, line number, function
    name, a list of lines of context, and index within the context.
    """
    records = []
    current = tb
    # Walk down the traceback chain via tb_next to the innermost frame.
    while current:
        records.append((current.tb_frame,) + getframeinfo(current, context))
        current = current.tb_next
    return records
def currentframe():
    """Return the frame object for the caller's stack frame."""
    try:
        # String exceptions (``raise 'catch me'``) are gone in Python 3;
        # raise a real exception just to obtain a live traceback.
        raise Exception('catch me')
    except Exception:
        # sys.exc_traceback was removed in Python 3; sys.exc_info() is the
        # portable spelling.  tb_frame is this function's frame, so f_back
        # is the caller's.
        return sys.exc_info()[2].tb_frame.f_back

# Prefer the fast C-level implementation where available (CPython).
if hasattr(sys, '_getframe'): currentframe = sys._getframe
def stack(context=1):
    """Return a list of records for the stack above the caller's frame."""
    caller = currentframe().f_back
    return getouterframes(caller, context)
def trace(context=1):
    """Return a list of records for the stack below the current exception."""
    # sys.exc_traceback was removed in Python 3; sys.exc_info()[2] is the
    # portable way to get the traceback currently being handled.
    return getinnerframes(sys.exc_info()[2], context)

View file

@ -1,591 +0,0 @@
"""Utilities to support packages."""
# NOTE: This module must remain compatible with Python 2.3, as it is shared
# by setuptools for distribution with Python 2.3 and up.
import os
import sys
import imp
import os.path
from types import ModuleType
__all__ = [
'get_importer', 'iter_importers', 'get_loader', 'find_loader',
'walk_packages', 'iter_modules', 'get_data',
'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
]
def read_code(stream):
    """Read and unmarshal a code object from an open .pyc stream.

    This helper is needed in order for the PEP 302 emulation to correctly
    handle compiled files.  Returns None when the magic number does not
    match the running interpreter's.
    """
    import marshal

    header = stream.read(4)
    if header != imp.get_magic():
        return None
    stream.read(4)  # Skip the timestamp field.
    return marshal.load(stream)
def simplegeneric(func):
    """Make a trivial single-dispatch generic function"""
    registry = {}
    def wrapper(*args, **kw):
        # Dispatch on the type of the first positional argument.
        ob = args[0]
        try:
            cls = ob.__class__
        except AttributeError:
            cls = type(ob)
        try:
            mro = cls.__mro__
        except AttributeError:
            # Old-style class (no __mro__): graft it onto a new-style class
            # just to obtain an MRO to walk, skipping the synthetic class
            # itself ([1:]).
            try:
                class cls(cls, object):
                    pass
                mro = cls.__mro__[1:]
            except TypeError:
                mro = object,  # must be an ExtensionClass or some such :(
        for t in mro:
            if t in registry:
                return registry[t](*args, **kw)
        else:
            # No registered implementation matched: fall back to the default.
            return func(*args, **kw)
    try:
        wrapper.__name__ = func.__name__
    except (TypeError, AttributeError):
        pass # Python 2.3 doesn't allow functions to be renamed

    def register(typ, func=None):
        # Supports both register(T, f) and @register(T) decorator usage.
        if func is None:
            return lambda f: register(typ, f)
        registry[typ] = func
        return func

    wrapper.__dict__ = func.__dict__
    wrapper.__doc__ = func.__doc__
    wrapper.register = register
    return wrapper
def walk_packages(path=None, prefix='', onerror=None):
    """Yields (module_loader, name, ispkg) for all modules recursively
    on path, or, if path is None, all accessible modules.

    'path' should be either None or a list of paths to look for
    modules in.

    'prefix' is a string to output on the front of every module name
    on output.

    Note that this function must import all *packages* (NOT all
    modules!) on the given path, in order to access the __path__
    attribute to find submodules.

    'onerror' is a function which gets called with one argument (the
    name of the package which was being imported) if any exception
    occurs while trying to import a package.  If no onerror function is
    supplied, ImportErrors are caught and ignored, while all other
    exceptions are propagated, terminating the search.

    Examples:

    # list all modules python can access
    walk_packages()

    # list all submodules of ctypes
    walk_packages(ctypes.__path__, ctypes.__name__+'.')
    """

    def seen(p, m={}):
        # Deliberate mutable default: 'm' persists across calls and acts as
        # the set of path entries already visited by this walk.
        if p in m:
            return True
        m[p] = True

    for importer, name, ispkg in iter_modules(path, prefix):
        yield importer, name, ispkg

        if ispkg:
            try:
                # Packages must be imported to expose their __path__.
                __import__(name)
            except ImportError:
                # Without an onerror handler, ImportErrors are ignored.
                if onerror is not None:
                    onerror(name)
            except Exception:
                # Other exceptions propagate unless a handler is supplied.
                if onerror is not None:
                    onerror(name)
                else:
                    raise
            else:
                path = getattr(sys.modules[name], '__path__', None) or []

                # don't traverse path items we've seen before
                path = [p for p in path if not seen(p)]

                for item in walk_packages(path, name+'.', onerror):
                    yield item
def iter_modules(path=None, prefix=''):
    """Yields (module_loader, name, ispkg) for all submodules on path,
    or, if path is None, all top-level modules on sys.path.

    'path' should be either None or a list of paths to look for
    modules in.

    'prefix' is a string to output on the front of every module name
    on output.
    """
    if path is None:
        importer_seq = iter_importers()
    else:
        importer_seq = map(get_importer, path)

    # Track names already produced so earlier importers shadow later ones.
    seen_names = {}
    for current_importer in importer_seq:
        for name, ispkg in iter_importer_modules(current_importer, prefix):
            if name in seen_names:
                continue
            seen_names[name] = 1
            yield current_importer, name, ispkg
#@simplegeneric
def iter_importer_modules(importer, prefix=''):
    """Default implementation: delegate to the importer's iter_modules()."""
    iter_fn = getattr(importer, 'iter_modules', None)
    if iter_fn is None:
        return []
    return iter_fn(prefix)

# Turn the default into a per-importer-type dispatching generic.
iter_importer_modules = simplegeneric(iter_importer_modules)
class ImpImporter:
    """PEP 302 Importer that wraps Python's "classic" import algorithm

    ImpImporter(dirname) produces a PEP 302 importer that searches that
    directory.  ImpImporter(None) produces a PEP 302 importer that searches
    the current sys.path, plus any modules that are frozen or built-in.

    Note that ImpImporter does not currently support being used by placement
    on sys.meta_path.
    """

    def __init__(self, path=None):
        # path: the single directory to search, or None for a classic
        # sys.path-wide search.
        self.path = path

    def find_module(self, fullname, path=None):
        # Note: we ignore 'path' argument since it is only used via meta_path
        subname = fullname.split(".")[-1]
        if subname != fullname and self.path is None:
            # A dotted name cannot be resolved by a sys.path-wide importer.
            return None
        if self.path is None:
            path = None
        else:
            path = [os.path.realpath(self.path)]
        try:
            file, filename, etc = imp.find_module(subname, path)
        except ImportError:
            return None
        return ImpLoader(fullname, file, filename, etc)

    def iter_modules(self, prefix=''):
        # Yields (prefix + name, ispkg) for each module or package found
        # directly under self.path.
        if self.path is None or not os.path.isdir(self.path):
            return

        yielded = {}
        import inspect
        try:
            filenames = os.listdir(self.path)
        except OSError:
            # ignore unreadable directories like import does
            filenames = []
        filenames.sort()  # handle packages before same-named modules

        for fn in filenames:
            modname = inspect.getmodulename(fn)
            if modname=='__init__' or modname in yielded:
                continue

            path = os.path.join(self.path, fn)
            ispkg = False

            if not modname and os.path.isdir(path) and '.' not in fn:
                # A bare directory may be a package: look for an __init__
                # module inside it.
                modname = fn
                try:
                    dircontents = os.listdir(path)
                except OSError:
                    # ignore unreadable directories like import does
                    dircontents = []
                for fn in dircontents:
                    subname = inspect.getmodulename(fn)
                    if subname=='__init__':
                        ispkg = True
                        break
                else:
                    continue # not a package

            if modname and '.' not in modname:
                yielded[modname] = 1
                yield prefix + modname, ispkg
class ImpLoader:
    """PEP 302 Loader that wraps Python's "classic" import algorithm
    """
    # Lazily-filled caches for the compiled code object and the source text.
    code = source = None

    def __init__(self, fullname, file, filename, etc):
        self.file = file
        self.filename = filename
        self.fullname = fullname
        # etc: the (suffix, mode, type) triple from imp.find_module().
        self.etc = etc

    def load_module(self, fullname):
        self._reopen()
        try:
            mod = imp.load_module(fullname, self.file, self.filename, self.etc)
        finally:
            if self.file:
                self.file.close()
        # Note: we don't set __loader__ because we want the module to look
        # normal; i.e. this is just a wrapper for standard import machinery
        return mod

    def get_data(self, pathname):
        # NOTE(review): the file object is not closed explicitly; relies on
        # refcounting/GC to release it.
        return open(pathname, "rb").read()

    def _reopen(self):
        # Reopen self.file if a previous operation closed it, choosing the
        # text/binary mode appropriate for the module type.
        if self.file and self.file.closed:
            mod_type = self.etc[2]
            if mod_type==imp.PY_SOURCE:
                self.file = open(self.filename, 'rU')
            elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
                self.file = open(self.filename, 'rb')

    def _fix_name(self, fullname):
        # This loader only handles the single module it was created for.
        if fullname is None:
            fullname = self.fullname
        elif fullname != self.fullname:
            raise ImportError("Loader for module %s cannot handle "
                              "module %s" % (self.fullname, fullname))
        return fullname

    def is_package(self, fullname):
        fullname = self._fix_name(fullname)
        return self.etc[2]==imp.PKG_DIRECTORY

    def get_code(self, fullname=None):
        fullname = self._fix_name(fullname)
        if self.code is None:
            mod_type = self.etc[2]
            if mod_type==imp.PY_SOURCE:
                # Compile the source text on demand.
                source = self.get_source(fullname)
                self.code = compile(source, self.filename, 'exec')
            elif mod_type==imp.PY_COMPILED:
                # Read the marshalled code object from the .pyc file.
                self._reopen()
                try:
                    self.code = read_code(self.file)
                finally:
                    self.file.close()
            elif mod_type==imp.PKG_DIRECTORY:
                # Delegate to the package's __init__ loader.
                self.code = self._get_delegate().get_code()
        return self.code

    def get_source(self, fullname=None):
        fullname = self._fix_name(fullname)
        if self.source is None:
            mod_type = self.etc[2]
            if mod_type==imp.PY_SOURCE:
                self._reopen()
                try:
                    self.source = self.file.read()
                finally:
                    self.file.close()
            elif mod_type==imp.PY_COMPILED:
                # For a .pyc, fall back to the sibling .py file if present.
                if os.path.exists(self.filename[:-1]):
                    f = open(self.filename[:-1], 'rU')
                    self.source = f.read()
                    f.close()
            elif mod_type==imp.PKG_DIRECTORY:
                self.source = self._get_delegate().get_source()
        return self.source

    def _get_delegate(self):
        # Loader for the package's __init__ module.
        return ImpImporter(self.filename).find_module('__init__')

    def get_filename(self, fullname=None):
        fullname = self._fix_name(fullname)
        mod_type = self.etc[2]
        if self.etc[2]==imp.PKG_DIRECTORY:
            return self._get_delegate().get_filename()
        elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
            return self.filename
        return None
try:
    import zipimport
    from zipimport import zipimporter

    def iter_zipimport_modules(importer, prefix=''):
        """Yield (name, ispkg) for the modules inside a zipimporter's archive."""
        # dict.keys() returns a view in Python 3, which has no .sort();
        # sorted() over the mapping works on both Python 2 and 3.
        dirlist = sorted(zipimport._zip_directory_cache[importer.archive])
        _prefix = importer.prefix
        plen = len(_prefix)
        yielded = {}
        import inspect
        for fn in dirlist:
            if not fn.startswith(_prefix):
                continue

            fn = fn[plen:].split(os.sep)
            if len(fn)==2 and fn[1].startswith('__init__.py'):
                # A subdirectory containing __init__.py is a package.
                if fn[0] not in yielded:
                    yielded[fn[0]] = 1
                    yield fn[0], True

            if len(fn)!=1:
                continue

            modname = inspect.getmodulename(fn[0])
            if modname=='__init__':
                continue

            if modname and '.' not in modname and modname not in yielded:
                yielded[modname] = 1
                yield prefix + modname, False

    iter_importer_modules.register(zipimporter, iter_zipimport_modules)
except ImportError:
    # zipimport is optional on some platforms (e.g. certain Jython builds).
    pass
def get_importer(path_item):
    """Retrieve a PEP 302 importer for the given path item

    The returned importer is cached in sys.path_importer_cache
    if it was newly created by a path hook.

    If there is no importer, a wrapper around the basic import
    machinery is returned. This wrapper is never inserted into
    the importer cache (None is inserted instead).

    The cache (or part of it) can be cleared manually if a
    rescan of sys.path_hooks is necessary.
    """
    if path_item in sys.path_importer_cache:
        importer = sys.path_importer_cache[path_item]
    else:
        # Cache miss: ask each registered path hook in turn.
        importer = None
        for hook in sys.path_hooks:
            try:
                importer = hook(path_item)
                break
            except ImportError:
                continue
        sys.path_importer_cache.setdefault(path_item, importer)

    if importer is None:
        # No hook claimed the item: fall back to the classic machinery.
        try:
            importer = ImpImporter(path_item)
        except ImportError:
            importer = None
    return importer
def iter_importers(fullname=""):
    """Yield PEP 302 importers for the given module name

    If fullname contains a '.', the importers will be for the package
    containing fullname, otherwise they will be importers for sys.meta_path,
    sys.path, and Python's "classic" import machinery, in that order.  If
    the named module is in a package, that package is imported as a side
    effect of invoking this function.

    Non PEP 302 mechanisms (e.g. the Windows registry) used by the
    standard import machinery to find files in alternative locations
    are partially supported, but are searched AFTER sys.path.  Normally,
    these locations are searched BEFORE sys.path, preventing sys.path
    entries from shadowing them; see the original pkgutil docs for the
    module types affected by this discrepancy.
    """
    if fullname.startswith('.'):
        raise ImportError("Relative module names not supported")
    if '.' in fullname:
        # Get the containing package's __path__ (importing it if needed).
        pkg_name = fullname.rpartition('.')[0]
        if pkg_name not in sys.modules:
            __import__(pkg_name)
        search_path = getattr(sys.modules[pkg_name], '__path__', None) or []
    else:
        # Top-level lookup: meta_path importers come first.
        for meta in sys.meta_path:
            yield meta
        search_path = sys.path
    for entry in search_path:
        yield get_importer(entry)
    if '.' not in fullname:
        # Finally, the classic import machinery itself.
        yield ImpImporter()
def get_loader(module_or_name):
    """Get a PEP 302 "loader" object for module_or_name

    If the module or package is accessible via the normal import
    mechanism, a wrapper around the relevant part of that machinery
    is returned.  Returns None if the module cannot be found or imported.
    If the named module is not already imported, its containing package
    (if any) is imported, in order to establish the package __path__.

    This function uses iter_importers(), and is thus subject to the same
    limitations regarding platform-specific special import locations such
    as the Windows registry.
    """
    target = module_or_name
    if target in sys.modules:
        # A name that is already imported: work with the module object.
        target = sys.modules[target]
    if isinstance(target, ModuleType):
        existing = getattr(target, '__loader__', None)
        if existing is not None:
            return existing
        fullname = target.__name__
    else:
        fullname = target
    return find_loader(fullname)
def find_loader(fullname):
    """Find a PEP 302 "loader" object for fullname

    If fullname contains dots, path must be the containing package's __path__.
    Returns None if the module cannot be found or imported.  This function
    uses iter_importers(), and is thus subject to the same limitations
    regarding platform-specific special import locations such as the
    Windows registry.
    """
    for candidate in iter_importers(fullname):
        found = candidate.find_module(fullname)
        if found is not None:
            return found
    return None
def extend_path(path, name):
    """Extend a package's path.

    Intended use is to place the following code in a package's __init__.py:

        from pkgutil import extend_path
        __path__ = extend_path(__path__, __name__)

    This will add to the package's __path__ all subdirectories of
    directories on sys.path named after the package.  This is useful
    if one wants to distribute different parts of a single logical
    package as multiple directories.

    It also looks for *.pkg files beginning where * matches the name
    argument.  This feature is similar to *.pth files (see site.py),
    except that it doesn't special-case lines starting with 'import'.
    A *.pkg file is trusted at face value: apart from checking for
    duplicates, all entries found in a *.pkg file are added to the
    path, regardless of whether they exist on the filesystem.  (This
    is a feature.)

    If the input path is not a list (as is the case for frozen
    packages) it is returned unchanged.  The input path is not
    modified; an extended copy is returned.  Items are only appended
    to the copy at the end.
    """
    if not isinstance(path, list):
        # This could happen e.g. when this is called from inside a
        # frozen package.  Return the path unchanged in that case.
        return path

    pname = os.path.join(*name.split('.')) # Reconstitute as relative path
    # Just in case os.extsep != '.'
    sname = os.extsep.join(name.split('.'))
    sname_pkg = sname + os.extsep + "pkg"
    init_py = "__init__" + os.extsep + "py"

    path = path[:] # Start with a copy of the existing path

    for dir in sys.path:
        # basestring was removed in Python 3; str is the correct check there.
        if not isinstance(dir, str) or not os.path.isdir(dir):
            continue
        subdir = os.path.join(dir, pname)
        # XXX This may still add duplicate entries to path on
        # case-insensitive filesystems
        initfile = os.path.join(subdir, init_py)
        if subdir not in path and os.path.isfile(initfile):
            path.append(subdir)
        # XXX Is this the right thing for subpackages like zope.app?
        # It looks for a file named "zope.app.pkg"
        pkgfile = os.path.join(dir, sname_pkg)
        if os.path.isfile(pkgfile):
            try:
                f = open(pkgfile)
            except IOError as msg:
                # ``except E, v`` syntax was removed in Python 3; use 'as'.
                sys.stderr.write("Can't open %s: %s\n" %
                                 (pkgfile, msg))
            else:
                for line in f:
                    line = line.rstrip('\n')
                    if not line or line.startswith('#'):
                        continue
                    path.append(line) # Don't check for existence!
                f.close()

    return path
def get_data(package, resource):
    """Get a resource from a package.

    This is a wrapper round the PEP 302 loader get_data API.  The package
    argument should be the name of a package, in standard module format
    (foo.bar).  The resource argument should be in the form of a relative
    filename, using '/' as the path separator.  The parent directory name
    '..' is not allowed, and nor is a rooted name (starting with a '/').

    The function returns a binary string, which is the contents of the
    specified resource.  If the package cannot be located or loaded, or it
    uses a PEP 302 loader which does not support get_data(), then None is
    returned.
    """
    loader = get_loader(package)
    if loader is None or not hasattr(loader, 'get_data'):
        return None
    mod = sys.modules.get(package) or loader.load_module(package)
    if mod is None or not hasattr(mod, '__file__'):
        return None

    # Translate the '/'-separated resource into an os.path-style filename
    # rooted at the package's directory, to match the get_data signature.
    segments = [os.path.dirname(mod.__file__)] + resource.split('/')
    return loader.get_data(os.path.join(*segments))

View file

@ -1,8 +1,6 @@
import sys
import os
IS_PY2 = sys.version_info < (3,)
def find_in_pythonpath(module_name):
# Check all the occurrences where we could match the given module/package in the PYTHONPATH.
@ -92,36 +90,20 @@ with VerifyShadowedImport('select') as verify_shadowed:
with VerifyShadowedImport('code') as verify_shadowed:
import code as _code; verify_shadowed.check(_code, ['compile_command', 'InteractiveInterpreter'])
if IS_PY2:
with VerifyShadowedImport('thread') as verify_shadowed:
import thread; verify_shadowed.check(thread, ['start_new_thread', 'allocate_lock'])
with VerifyShadowedImport('_thread') as verify_shadowed:
import _thread as thread; verify_shadowed.check(thread, ['start_new_thread', 'start_new', 'allocate_lock'])
with VerifyShadowedImport('Queue') as verify_shadowed:
import Queue as _queue; verify_shadowed.check(_queue, ['Queue', 'LifoQueue', 'Empty', 'Full', 'deque'])
with VerifyShadowedImport('queue') as verify_shadowed:
import queue as _queue; verify_shadowed.check(_queue, ['Queue', 'LifoQueue', 'Empty', 'Full', 'deque'])
with VerifyShadowedImport('xmlrpclib') as verify_shadowed:
import xmlrpclib; verify_shadowed.check(xmlrpclib, ['ServerProxy', 'Marshaller', 'Server'])
with VerifyShadowedImport('xmlrpclib') as verify_shadowed:
import xmlrpc.client as xmlrpclib; verify_shadowed.check(xmlrpclib, ['ServerProxy', 'Marshaller', 'Server'])
with VerifyShadowedImport('SimpleXMLRPCServer') as verify_shadowed:
import SimpleXMLRPCServer as _pydev_SimpleXMLRPCServer; verify_shadowed.check(_pydev_SimpleXMLRPCServer, ['SimpleXMLRPCServer'])
with VerifyShadowedImport('xmlrpc.server') as verify_shadowed:
import xmlrpc.server as xmlrpcserver; verify_shadowed.check(xmlrpcserver, ['SimpleXMLRPCServer'])
with VerifyShadowedImport('BaseHTTPServer') as verify_shadowed:
import BaseHTTPServer; verify_shadowed.check(BaseHTTPServer, ['BaseHTTPRequestHandler'])
else:
with VerifyShadowedImport('_thread') as verify_shadowed:
import _thread as thread; verify_shadowed.check(thread, ['start_new_thread', 'start_new', 'allocate_lock'])
with VerifyShadowedImport('queue') as verify_shadowed:
import queue as _queue; verify_shadowed.check(_queue, ['Queue', 'LifoQueue', 'Empty', 'Full', 'deque'])
with VerifyShadowedImport('xmlrpclib') as verify_shadowed:
import xmlrpc.client as xmlrpclib; verify_shadowed.check(xmlrpclib, ['ServerProxy', 'Marshaller', 'Server'])
with VerifyShadowedImport('xmlrpc.server') as verify_shadowed:
import xmlrpc.server as _pydev_SimpleXMLRPCServer; verify_shadowed.check(_pydev_SimpleXMLRPCServer, ['SimpleXMLRPCServer'])
with VerifyShadowedImport('http.server') as verify_shadowed:
import http.server as BaseHTTPServer; verify_shadowed.check(BaseHTTPServer, ['BaseHTTPRequestHandler'])
with VerifyShadowedImport('http.server') as verify_shadowed:
import http.server as BaseHTTPServer; verify_shadowed.check(BaseHTTPServer, ['BaseHTTPRequestHandler'])
# If set, this is a version of the threading.enumerate that doesn't have the patching to remove the pydevd threads.
# Note: as it can't be set during execution, don't import the name (import the module and access it through its name).

View file

@ -3,14 +3,17 @@ import sys
def patch_sys_module():
def patched_exc_info(fun):
def pydev_debugger_exc_info():
type, value, traceback = fun()
if type == ImportError:
#we should not show frame added by plugin_import call
# we should not show frame added by plugin_import call
if traceback and hasattr(traceback, "tb_next"):
return type, value, traceback.tb_next
return type, value, traceback
return pydev_debugger_exc_info
system_exc_info = sys.exc_info
@ -20,19 +23,18 @@ def patch_sys_module():
def patched_reload(orig_reload):
def pydev_debugger_reload(module):
orig_reload(module)
if module.__name__ == "sys":
# if sys module was reloaded we should patch it again
patch_sys_module()
return pydev_debugger_reload
def patch_reload():
if sys.version_info[0] >= 3:
import builtins # Py3
else:
import __builtin__ as builtins
import builtins # Py3
if hasattr(builtins, "reload"):
sys.builtin_orig_reload = builtins.reload
@ -56,10 +58,7 @@ def patch_reload():
def cancel_patches_in_sys_module():
sys.exc_info = sys.system_exc_info # @UndefinedVariable
if sys.version_info[0] >= 3:
import builtins # Py3
else:
import __builtin__ as builtins
import builtins # Py3
if hasattr(sys, "builtin_orig_reload"):
builtins.reload = sys.builtin_orig_reload

File diff suppressed because it is too large Load diff

View file

@ -5,6 +5,8 @@ from _pydev_runfiles import pydev_runfiles_xml_rpc
import time
from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support
from contextlib import contextmanager
from io import StringIO
import traceback
#=======================================================================================================================
@ -91,7 +93,7 @@ class PydevPlugin(Plugin):
address = f, '?'
except:
sys.stderr.write("PyDev: Internal pydev error getting test address. Please report at the pydev bug tracker\n")
import traceback;traceback.print_exc()
traceback.print_exc()
sys.stderr.write("\n\n\n")
address = '?', '?'
return address
@ -121,15 +123,11 @@ class PydevPlugin(Plugin):
if len(err) != 3:
if len(err) == 2:
return err[1] # multiprocess
try:
from StringIO import StringIO
except:
from io import StringIO
s = StringIO()
etype, value, tb = err
if isinstance(value, str):
return value
import traceback;traceback.print_exception(etype, value, tb, file=s)
traceback.print_exception(etype, value, tb, file=s)
return s.getvalue()
return err

View file

@ -5,18 +5,18 @@ import warnings
from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
from _pydev_bundle.pydev_imports import xmlrpclib, _queue
from _pydevd_bundle.pydevd_constants import Null, IS_PY3K
from _pydevd_bundle.pydevd_constants import Null
Queue = _queue.Queue
#This may happen in IronPython (in Python it shouldn't happen as there are
#'fast' replacements that are used in xmlrpclib.py)
# This may happen in IronPython (in Python it shouldn't happen as there are
# 'fast' replacements that are used in xmlrpclib.py)
warnings.filterwarnings(
'ignore', 'The xmllib module is obsolete.*', DeprecationWarning)
file_system_encoding = getfilesystemencoding()
#=======================================================================================================================
# _ServerHolder
#=======================================================================================================================
@ -34,7 +34,6 @@ def set_server(server):
_ServerHolder.SERVER = server
#=======================================================================================================================
# ParallelNotification
#=======================================================================================================================
@ -48,7 +47,6 @@ class ParallelNotification(object):
return self.method, self.args
#=======================================================================================================================
# KillServer
#=======================================================================================================================
@ -61,26 +59,21 @@ class KillServer(object):
#=======================================================================================================================
class ServerFacade(object):
def __init__(self, notifications_queue):
self.notifications_queue = notifications_queue
def notifyTestsCollected(self, *args):
self.notifications_queue.put_nowait(ParallelNotification('notifyTestsCollected', args))
def notifyConnected(self, *args):
self.notifications_queue.put_nowait(ParallelNotification('notifyConnected', args))
def notifyTestRunFinished(self, *args):
self.notifications_queue.put_nowait(ParallelNotification('notifyTestRunFinished', args))
def notifyStartTest(self, *args):
self.notifications_queue.put_nowait(ParallelNotification('notifyStartTest', args))
def notifyTest(self, *args):
new_args = []
for arg in args:
@ -89,19 +82,14 @@ class ServerFacade(object):
self.notifications_queue.put_nowait(ParallelNotification('notifyTest', args))
#=======================================================================================================================
# ServerComm
#=======================================================================================================================
class ServerComm(threading.Thread):
def __init__(self, notifications_queue, port, daemon=False):
threading.Thread.__init__(self)
self.setDaemon(daemon) # If False, wait for all the notifications to be passed before exiting!
self.setDaemon(daemon) # If False, wait for all the notifications to be passed before exiting!
self.finished = False
self.notifications_queue = notifications_queue
@ -126,7 +114,6 @@ class ServerComm(threading.Thread):
self.server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), port),
encoding=encoding)
def run(self):
while True:
kill_found = False
@ -140,15 +127,14 @@ class ServerComm(threading.Thread):
try:
while True:
command = self.notifications_queue.get(block=False) #No block to create a batch.
command = self.notifications_queue.get(block=False) # No block to create a batch.
if isinstance(command, KillServer):
kill_found = True
else:
assert isinstance(command, ParallelNotification)
commands.append(command.to_tuple())
except:
pass #That's OK, we're getting it until it becomes empty so that we notify multiple at once.
pass # That's OK, we're getting it until it becomes empty so that we notify multiple at once.
if commands:
try:
@ -161,7 +147,6 @@ class ServerComm(threading.Thread):
return
#=======================================================================================================================
# initialize_server
#=======================================================================================================================
@ -173,7 +158,7 @@ def initialize_server(port, daemon=False):
_ServerHolder.SERVER_COMM = ServerComm(notifications_queue, port, daemon)
_ServerHolder.SERVER_COMM.start()
else:
#Create a null server, so that we keep the interface even without any connection.
# Create a null server, so that we keep the interface even without any connection.
_ServerHolder.SERVER = Null()
_ServerHolder.SERVER_COMM = Null()
@ -183,7 +168,6 @@ def initialize_server(port, daemon=False):
traceback.print_exc()
#=======================================================================================================================
# notifyTest
#=======================================================================================================================
@ -205,7 +189,7 @@ def notifyStartTest(file, test):
'''
assert file is not None
if test is None:
test = '' #Could happen if we have an import error importing module.
test = '' # Could happen if we have an import error importing module.
try:
_ServerHolder.SERVER.notifyStartTest(file, test)
@ -215,26 +199,15 @@ def notifyStartTest(file, test):
def _encode_if_needed(obj):
# In the java side we expect strings to be ISO-8859-1 (org.python.pydev.debug.pyunit.PyUnitServer.initializeDispatches().new Dispatch() {...}.getAsStr(Object))
if not IS_PY3K:
if isinstance(obj, str):
try:
return xmlrpclib.Binary(obj.decode(sys.stdin.encoding).encode('ISO-8859-1', 'xmlcharrefreplace'))
except:
return xmlrpclib.Binary(obj)
if isinstance(obj, str): # Unicode in py3
return xmlrpclib.Binary(obj.encode('ISO-8859-1', 'xmlcharrefreplace'))
elif isinstance(obj, unicode):
return xmlrpclib.Binary(obj.encode('ISO-8859-1', 'xmlcharrefreplace'))
elif isinstance(obj, bytes):
try:
return xmlrpclib.Binary(obj.decode(sys.stdin.encoding).encode('ISO-8859-1', 'xmlcharrefreplace'))
except:
return xmlrpclib.Binary(obj) # bytes already
else:
if isinstance(obj, str): # Unicode in py3
return xmlrpclib.Binary(obj.encode('ISO-8859-1', 'xmlcharrefreplace'))
elif isinstance(obj, bytes):
try:
return xmlrpclib.Binary(obj.decode(sys.stdin.encoding).encode('ISO-8859-1', 'xmlcharrefreplace'))
except:
return xmlrpclib.Binary(obj) #bytes already
return obj
@ -255,7 +228,7 @@ def notifyTest(cond, captured_output, error_contents, file, test, time):
assert error_contents is not None
assert file is not None
if test is None:
test = '' #Could happen if we have an import error importing module.
test = '' # Could happen if we have an import error importing module.
assert time is not None
try:
captured_output = _encode_if_needed(captured_output)
@ -265,6 +238,7 @@ def notifyTest(cond, captured_output, error_contents, file, test, time):
except:
traceback.print_exc()
#=======================================================================================================================
# notifyTestRunFinished
#=======================================================================================================================

View file

@ -15,7 +15,7 @@ from _pydevd_bundle.pydevd_comm import (InternalGetThreadStack, internal_get_com
from _pydevd_bundle.pydevd_comm_constants import (CMD_THREAD_SUSPEND, file_system_encoding,
CMD_STEP_INTO_MY_CODE, CMD_STOP_ON_START, CMD_SMART_STEP_INTO)
from _pydevd_bundle.pydevd_constants import (get_current_thread_id, set_protocol, get_protocol,
HTTP_JSON_PROTOCOL, JSON_PROTOCOL, IS_PY3K, DebugInfoHolder, dict_keys, dict_items, IS_WINDOWS)
HTTP_JSON_PROTOCOL, JSON_PROTOCOL, DebugInfoHolder, IS_WINDOWS)
from _pydevd_bundle.pydevd_net_command_factory_json import NetCommandFactoryJson
from _pydevd_bundle.pydevd_net_command_factory_xml import NetCommandFactory
import pydevd_file_utils
@ -329,10 +329,7 @@ class PyDevdAPI(object):
-- in py3 raises an error if it's not str already.
'''
if s.__class__ != str:
if not IS_PY3K:
s = s.encode('utf-8')
else:
raise AssertionError('Expected to have str on Python 3. Found: %s (%s)' % (s, s.__class__))
raise AssertionError('Expected to have str on Python 3. Found: %s (%s)' % (s, s.__class__))
return s
def filename_to_str(self, filename):
@ -341,10 +338,7 @@ class PyDevdAPI(object):
-- in py3 raises an error if it's not str already.
'''
if filename.__class__ != str:
if not IS_PY3K:
filename = filename.encode(file_system_encoding)
else:
raise AssertionError('Expected to have str on Python 3. Found: %s (%s)' % (filename, filename.__class__))
raise AssertionError('Expected to have str on Python 3. Found: %s (%s)' % (filename, filename.__class__))
return filename
def filename_to_server(self, filename):
@ -578,9 +572,9 @@ class PyDevdAPI(object):
translations are applied).
'''
pydev_log.debug('Reapplying breakpoints.')
items = dict_items(py_db.api_received_breakpoints) # Create a copy with items to reapply.
values = list(py_db.api_received_breakpoints.values()) # Create a copy with items to reapply.
self.remove_all_breakpoints(py_db, '*')
for _key, val in items:
for val in values:
_new_filename, api_add_breakpoint_params = val
self.add_breakpoint(py_db, *api_add_breakpoint_params)
@ -614,7 +608,7 @@ class PyDevdAPI(object):
changed = True
else:
items = dict_items(py_db.api_received_breakpoints) # Create a copy to remove items.
items = list(py_db.api_received_breakpoints.items()) # Create a copy to remove items.
translated_filenames = []
for key, val in items:
original_filename, _breakpoint_id = key
@ -644,7 +638,7 @@ class PyDevdAPI(object):
:param int breakpoint_id:
'''
for key, val in dict_items(py_db.api_received_breakpoints):
for key, val in list(py_db.api_received_breakpoints.items()):
original_filename, existing_breakpoint_id = key
_new_filename, _api_add_breakpoint_params = val
if received_filename == original_filename and existing_breakpoint_id == breakpoint_id:
@ -687,7 +681,7 @@ class PyDevdAPI(object):
except KeyError:
pydev_log.info("Error removing breakpoint: Breakpoint id not found: %s id: %s. Available ids: %s\n",
canonical_normalized_filename, breakpoint_id, dict_keys(id_to_pybreakpoint))
canonical_normalized_filename, breakpoint_id, list(id_to_pybreakpoint))
py_db.on_breakpoints_changed(removed=True)

View file

@ -1,4 +1,3 @@
from _pydevd_bundle.pydevd_constants import dict_iter_values, IS_PY24
from _pydev_bundle import pydev_log
from _pydevd_bundle import pydevd_import_class
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame
@ -119,7 +118,7 @@ def get_exception_breakpoint(exctype, exceptions):
try:
return exceptions[exception_full_qname]
except KeyError:
for exception_breakpoint in dict_iter_values(exceptions):
for exception_breakpoint in exceptions.values():
if exception_breakpoint.type is not None and issubclass(exctype, exception_breakpoint.type):
if exc is None or issubclass(exception_breakpoint.type, exc.type):
exc = exception_breakpoint
@ -179,9 +178,6 @@ def stop_on_unhandled_exception(py_db, thread, additional_info, arg):
def get_exception_class(kls):
if IS_PY24 and "BaseException" == kls:
kls = "Exception"
try:
return eval(kls)
except:

View file

@ -4,10 +4,6 @@ Bytecode analysing utils. Originally added for using in smart step into.
Note: not importable from Python 2.
"""
import sys
if sys.version_info[0] < 3:
raise ImportError('This module is only compatible with Python 3.')
from _pydev_bundle import pydev_log
from types import CodeType
from _pydevd_frame_eval.vendored.bytecode.instr import _Variable

View file

@ -8,15 +8,12 @@ Note: this is a work in progress / proof of concept / not ready to be used.
import dis
from _pydevd_bundle.pydevd_collect_bytecode_info import iter_instructions
from _pydevd_bundle.pydevd_constants import dict_iter_items, IS_PY2
from _pydev_bundle import pydev_log
import sys
import inspect
from io import StringIO
try:
xrange = xrange
except:
xrange = range
xrange = range
class _Stack(object):
@ -354,110 +351,90 @@ class _CallFunction(_BaseHandler):
self.stack.push(self)
if IS_PY2:
@_register
class _MakeFunctionPy3(_BaseHandler):
"""
Pushes a new function object on the stack. From bottom to top, the consumed stack must consist
of values if the argument carries a specified flag value
@_register
class _MakeFunctionPy2(_BaseHandler):
"""
Pushes a new function object on the stack. TOS is the code associated with the function. The
function object is defined to have argc default parameters, which are found below TOS.
"""
0x01 a tuple of default values for positional-only and positional-or-keyword parameters in positional order
opname = "MAKE_FUNCTION"
0x02 a dictionary of keyword-only parameters' default values
def _handle(self):
stack = self.stack
self.code = stack.pop()
0x04 an annotation dictionary
stack.push(self)
0x08 a tuple containing cells for free variables, making a closure
_MakeFunction = _MakeFunctionPy2
the code associated with the function (at TOS1)
else:
the qualified name of the function (at TOS)
"""
@_register
class _MakeFunctionPy3(_BaseHandler):
"""
Pushes a new function object on the stack. From bottom to top, the consumed stack must consist
of values if the argument carries a specified flag value
opname = "MAKE_FUNCTION"
is_lambda = False
0x01 a tuple of default values for positional-only and positional-or-keyword parameters in positional order
def _handle(self):
stack = self.stack
self.qualified_name = stack.pop()
self.code = stack.pop()
0x02 a dictionary of keyword-only parameters' default values
default_node = None
if self.instruction.argval & 0x01:
default_node = stack.pop()
0x04 an annotation dictionary
is_lambda = self.is_lambda = '<lambda>' in [x.tok for x in self.qualified_name.tokens]
0x08 a tuple containing cells for free variables, making a closure
the code associated with the function (at TOS1)
the qualified name of the function (at TOS)
"""
opname = "MAKE_FUNCTION"
is_lambda = False
def _handle(self):
stack = self.stack
self.qualified_name = stack.pop()
self.code = stack.pop()
default_node = None
if self.instruction.argval & 0x01:
default_node = stack.pop()
is_lambda = self.is_lambda = '<lambda>' in [x.tok for x in self.qualified_name.tokens]
if not is_lambda:
def_token = _Token(self.i_line, None, 'def ')
self.tokens.append(def_token)
for token in self.qualified_name.tokens:
self.tokens.append(token)
if not is_lambda:
def_token = _Token(self.i_line, None, 'def ')
self.tokens.append(def_token)
token.mark_after(def_token)
prev = token
for token in self.qualified_name.tokens:
self.tokens.append(token)
if not is_lambda:
token.mark_after(def_token)
prev = token
open_parens_token = _Token(self.i_line, None, '(', after=prev)
self.tokens.append(open_parens_token)
prev = open_parens_token
open_parens_token = _Token(self.i_line, None, '(', after=prev)
self.tokens.append(open_parens_token)
prev = open_parens_token
code = self.code.instruction.argval
code = self.code.instruction.argval
if default_node:
defaults = ([_SENTINEL] * (len(code.co_varnames) - len(default_node.instruction.argval))) + list(default_node.instruction.argval)
else:
defaults = [_SENTINEL] * len(code.co_varnames)
if default_node:
defaults = ([_SENTINEL] * (len(code.co_varnames) - len(default_node.instruction.argval))) + list(default_node.instruction.argval)
else:
defaults = [_SENTINEL] * len(code.co_varnames)
for i, arg in enumerate(code.co_varnames):
if i > 0:
comma_token = _Token(prev.i_line, None, ', ', after=prev)
self.tokens.append(comma_token)
prev = comma_token
for i, arg in enumerate(code.co_varnames):
if i > 0:
comma_token = _Token(prev.i_line, None, ', ', after=prev)
self.tokens.append(comma_token)
prev = comma_token
arg_token = _Token(self.i_line, None, arg, after=prev)
self.tokens.append(arg_token)
arg_token = _Token(self.i_line, None, arg, after=prev)
self.tokens.append(arg_token)
default = defaults[i]
if default is not _SENTINEL:
eq_token = _Token(default_node.i_line, None, '=', after=prev)
self.tokens.append(eq_token)
prev = eq_token
default = defaults[i]
if default is not _SENTINEL:
eq_token = _Token(default_node.i_line, None, '=', after=prev)
self.tokens.append(eq_token)
prev = eq_token
default_token = _Token(default_node.i_line, None, str(default), after=prev)
self.tokens.append(default_token)
prev = default_token
default_token = _Token(default_node.i_line, None, str(default), after=prev)
self.tokens.append(default_token)
prev = default_token
tok_close_parens = _Token(prev.i_line, None, '):', after=prev)
self.tokens.append(tok_close_parens)
tok_close_parens = _Token(prev.i_line, None, '):', after=prev)
self.tokens.append(tok_close_parens)
self._write_tokens()
self._write_tokens()
stack.push(self)
self.writer.indent(prev.i_line + 1)
self.writer.dedent(max(self.disassembler.merge_code(code)))
stack.push(self)
self.writer.indent(prev.i_line + 1)
self.writer.dedent(max(self.disassembler.merge_code(code)))
_MakeFunction = _MakeFunctionPy3
_MakeFunction = _MakeFunctionPy3
def _print_after_info(line_contents, stream=None):
@ -518,10 +495,6 @@ def _compose_line_contents(line_contents, previous_line_tokens):
if token not in handled:
lst.append(token.tok)
try:
from StringIO import StringIO
except:
from io import StringIO
stream = StringIO()
_print_after_info(line_contents, stream)
pydev_log.critical('Error. After markers are not correct:\n%s', stream.getvalue())
@ -577,7 +550,7 @@ class _PyCodeToSource(object):
# print(d, getattr(code, d))
line_to_contents = _PyCodeToSource(code, self.memo).build_line_to_contents()
lines = []
for line, contents in sorted(dict_iter_items(line_to_contents)):
for line, contents in sorted(line_to_contents.items()):
lines.append(line)
self.writer.get_line(line).extend(contents)
if DEBUG:
@ -587,13 +560,11 @@ class _PyCodeToSource(object):
def disassemble(self):
show_lines = False
line_to_contents = self.build_line_to_contents()
from io import StringIO
stream = StringIO()
last_line = 0
indent = ''
previous_line_tokens = set()
for i_line, contents in sorted(dict_iter_items(line_to_contents)):
for i_line, contents in sorted(line_to_contents.items()):
while last_line < i_line - 1:
if show_lines:
stream.write(u"%s.\n" % (last_line + 1,))

View file

@ -4,15 +4,12 @@ import sys
from collections import namedtuple
from _pydev_bundle import pydev_log
from _pydevd_bundle.pydevd_constants import (IS_PY38_OR_GREATER,
dict_iter_items, dict_iter_values)
from _pydevd_bundle.pydevd_constants import IS_PY38_OR_GREATER
from opcode import (EXTENDED_ARG, HAVE_ARGUMENT, cmp_op, hascompare, hasconst,
hasfree, hasjrel, haslocal, hasname, opname)
try:
xrange
except NameError:
xrange = range
xrange = range
from io import StringIO
class TryExceptInfo(object):
@ -892,9 +889,9 @@ class _Disassembler(object):
instruction.argval, self.firstlineno, self.level + 1
).build_line_to_contents()
for contents in dict_iter_values(code_line_to_contents):
for contents in code_line_to_contents.values():
contents.insert(0, ' ')
for line, contents in dict_iter_items(code_line_to_contents):
for line, contents in code_line_to_contents.items():
line_to_contents.setdefault(line, []).extend(contents)
return msg(instruction, 'LOAD_CONST(code)')
@ -935,14 +932,10 @@ class _Disassembler(object):
def disassemble(self):
line_to_contents = self.build_line_to_contents()
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
stream = StringIO()
last_line = 0
show_lines = False
for line, contents in sorted(dict_iter_items(line_to_contents)):
for line, contents in sorted(line_to_contents.items()):
while last_line < line - 1:
if show_lines:
stream.write('%s.\n' % (last_line + 1,))

View file

@ -71,7 +71,7 @@ from _pydev_imps._pydev_saved_modules import time
from _pydev_imps._pydev_saved_modules import threading
from _pydev_imps._pydev_saved_modules import socket as socket_module
from _pydevd_bundle.pydevd_constants import (DebugInfoHolder, IS_WINDOWS, IS_JYTHON,
IS_PY2, IS_PY36_OR_GREATER, STATE_RUN, dict_keys, ASYNC_EVAL_TIMEOUT_SEC,
IS_PY36_OR_GREATER, STATE_RUN, ASYNC_EVAL_TIMEOUT_SEC,
get_global_debugger, GetGlobalDebugger, set_global_debugger, silence_warnings_decorator) # Keep for backward compatibility @UnusedImport
from _pydev_bundle.pydev_override import overrides
import weakref
@ -89,20 +89,10 @@ import dis
from _pydevd_bundle.pydevd_frame_utils import create_frames_list_from_exception_cause
import pydevd_file_utils
import itertools
from functools import partial
try:
from urllib import quote_plus, unquote_plus # @UnresolvedImport
except:
from urllib.parse import quote_plus, unquote_plus # @Reimport @UnresolvedImport
from urllib.parse import quote_plus, unquote_plus
import pydevconsole
from _pydevd_bundle import pydevd_vars, pydevd_io, pydevd_reload
try:
from _pydevd_bundle import pydevd_bytecode_utils
except ImportError:
pydevd_bytecode_utils = None # i.e.: Not available on Py2.
from _pydevd_bundle import pydevd_bytecode_utils
from _pydevd_bundle import pydevd_xml
from _pydevd_bundle import pydevd_vm_type
import sys
@ -116,13 +106,7 @@ from _pydev_bundle import _pydev_completer
from pydevd_tracing import get_exception_traceback_str
from _pydevd_bundle import pydevd_console
from _pydev_bundle.pydev_monkey import disable_trace_thread_modules, enable_trace_thread_modules
try:
import cStringIO as StringIO # may not always be available @UnusedImport
except:
try:
import StringIO # @Reimport @UnresolvedImport
except:
import io as StringIO
from io import StringIO
# CMD_XXX constants imported for backward compatibility
from _pydevd_bundle.pydevd_comm_constants import * # @UnusedWildImport
@ -583,9 +567,6 @@ def _send_io_message(py_db, s):
def internal_reload_code(dbg, seq, module_name, filename):
try:
found_module_to_reload = False
if IS_PY2 and isinstance(filename, unicode):
filename = filename.encode(sys.getfilesystemencoding())
if module_name is not None:
module_name = module_name
if module_name not in sys.modules:
@ -727,11 +708,6 @@ class InternalSetNextStatementThread(InternalThreadCommand):
self.line = line
self.seq = seq
if IS_PY2:
if isinstance(func_name, unicode):
# On cython with python 2.X it requires an str, not unicode (but on python 3.3 it should be a str, not bytes).
func_name = func_name.encode('utf-8')
self.func_name = func_name
def do_it(self, dbg):
@ -808,18 +784,18 @@ class InternalGetVariable(InternalThreadCommand):
def do_it(self, dbg):
''' Converts request into python variable '''
try:
xml = StringIO.StringIO()
xml = StringIO()
xml.write("<xml>")
_typeName, val_dict = pydevd_vars.resolve_compound_variable_fields(
type_name, val_dict = pydevd_vars.resolve_compound_variable_fields(
dbg, self.thread_id, self.frame_id, self.scope, self.attributes)
if val_dict is None:
val_dict = {}
# assume properly ordered if resolver returns 'OrderedDict'
# check type as string to support OrderedDict backport for older Python
keys = dict_keys(val_dict)
if not (_typeName == "OrderedDict" or val_dict.__class__.__name__ == "OrderedDict" or IS_PY36_OR_GREATER):
keys.sort(key=compare_object_attrs_key)
keys = list(val_dict)
if not (type_name == "OrderedDict" or val_dict.__class__.__name__ == "OrderedDict" or IS_PY36_OR_GREATER):
keys = sorted(keys, key=compare_object_attrs_key)
timer = Timer()
for k in keys:
@ -1167,12 +1143,6 @@ def internal_evaluate_expression_json(py_db, request, thread_id):
ctx = NULL
with ctx:
if IS_PY2 and isinstance(expression, unicode):
try:
expression.encode('utf-8')
except Exception:
_evaluate_response(py_db, request, '', error_message='Expression is not valid utf-8.')
raise
try_exec = False
if frame_id is None:
@ -1338,19 +1308,6 @@ def internal_set_expression_json(py_db, request, thread_id):
if hasattr(fmt, 'to_dict'):
fmt = fmt.to_dict()
if IS_PY2 and isinstance(expression, unicode):
try:
expression = expression.encode('utf-8')
except:
_evaluate_response(py_db, request, '', error_message='Expression is not valid utf-8.')
raise
if IS_PY2 and isinstance(value, unicode):
try:
value = value.encode('utf-8')
except:
_evaluate_response(py_db, request, '', error_message='Value is not valid utf-8.')
raise
frame = py_db.find_frame(thread_id, frame_id)
exec_code = '%s = (%s)' % (expression, value)
result = pydevd_vars.evaluate_expression(py_db, frame, exec_code, is_exec=True)
@ -1402,12 +1359,6 @@ def internal_get_completions(dbg, seq, thread_id, frame_id, act_tok, line=-1, co
frame = dbg.find_frame(thread_id, frame_id)
if frame is not None:
if IS_PY2:
if not isinstance(act_tok, bytes):
act_tok = act_tok.encode('utf-8')
if not isinstance(qualifier, bytes):
qualifier = qualifier.encode('utf-8')
completions = _pydev_completer.generate_completions(frame, act_tok)
# Note that qualifier and start are only actually valid for the
@ -1833,7 +1784,7 @@ class AbstractGetValueAsyncThread(PyDBDaemonThread):
@overrides(PyDBDaemonThread._on_run)
def _on_run(self):
start = time.time()
xml = StringIO.StringIO()
xml = StringIO()
xml.write("<xml>")
for (var_obj, name) in self.var_objs:
current_time = time.time()

View file

@ -104,15 +104,12 @@ IS_MAC = sys.platform == 'darwin'
IS_64BIT_PROCESS = sys.maxsize > (2 ** 32)
IS_JYTHON = pydevd_vm_type.get_vm_type() == pydevd_vm_type.PydevdVmType.JYTHON
IS_JYTH_LESS25 = False
IS_PYPY = platform.python_implementation() == 'PyPy'
if IS_JYTHON:
import java.lang.System # @UnresolvedImport
IS_WINDOWS = java.lang.System.getProperty("os.name").lower().startswith("windows")
if sys.version_info[0] == 2 and sys.version_info[1] < 5:
IS_JYTH_LESS25 = True
USE_CUSTOM_SYS_CURRENT_FRAMES = not hasattr(sys, '_current_frames') or IS_PYPY
USE_CUSTOM_SYS_CURRENT_FRAMES_MAP = USE_CUSTOM_SYS_CURRENT_FRAMES and (IS_PYPY or IS_IRONPYTHON)
@ -166,45 +163,16 @@ CYTHON_SUPPORTED = False
python_implementation = platform.python_implementation()
if python_implementation == 'CPython':
# Only available for CPython!
if (
(sys.version_info[0] == 2 and sys.version_info[1] >= 6)
or (sys.version_info[0] == 3 and sys.version_info[1] >= 3)
or (sys.version_info[0] > 3)
):
# Supported in 2.6,2.7 or 3.3 onwards (32 or 64)
CYTHON_SUPPORTED = True
CYTHON_SUPPORTED = True
#=======================================================================================================================
# Python 3?
#=======================================================================================================================
IS_PY3K = False
IS_PY34_OR_GREATER = False
IS_PY35_OR_GREATER = False
IS_PY36_OR_GREATER = False
IS_PY37_OR_GREATER = False
IS_PY38_OR_GREATER = False
IS_PY39_OR_GREATER = False
IS_PY310_OR_GREATER = False
IS_PY2 = True
IS_PY27 = False
IS_PY24 = False
try:
if sys.version_info[0] >= 3:
IS_PY3K = True
IS_PY2 = False
IS_PY34_OR_GREATER = sys.version_info >= (3, 4)
IS_PY35_OR_GREATER = sys.version_info >= (3, 5)
IS_PY36_OR_GREATER = sys.version_info >= (3, 6)
IS_PY37_OR_GREATER = sys.version_info >= (3, 7)
IS_PY38_OR_GREATER = sys.version_info >= (3, 8)
IS_PY39_OR_GREATER = sys.version_info >= (3, 9)
IS_PY310_OR_GREATER = sys.version_info >= (3, 10)
elif sys.version_info[0] == 2 and sys.version_info[1] == 7:
IS_PY27 = True
elif sys.version_info[0] == 2 and sys.version_info[1] == 4:
IS_PY24 = True
except AttributeError:
pass # Not all versions have sys.version_info
IS_PY36_OR_GREATER = sys.version_info >= (3, 6)
IS_PY37_OR_GREATER = sys.version_info >= (3, 7)
IS_PY38_OR_GREATER = sys.version_info >= (3, 8)
IS_PY39_OR_GREATER = sys.version_info >= (3, 9)
IS_PY310_OR_GREATER = sys.version_info >= (3, 10)
def version_str(v):
@ -309,7 +277,7 @@ LOAD_VALUES_ASYNC = is_true_in_env('PYDEVD_LOAD_VALUES_ASYNC')
DEFAULT_VALUE = "__pydevd_value_async"
ASYNC_EVAL_TIMEOUT_SEC = 60
NEXT_VALUE_SEPARATOR = "__pydev_val__"
BUILTINS_MODULE_NAME = '__builtin__' if IS_PY2 else 'builtins'
BUILTINS_MODULE_NAME = 'builtins'
SHOW_DEBUG_INFO_ENV = is_true_in_env(('PYCHARM_DEBUG', 'PYDEV_DEBUG', 'PYDEVD_DEBUG'))
# Pandas customization.
@ -472,68 +440,10 @@ def after_fork():
_thread_id_lock = ForkSafeLock()
thread_get_ident = thread.get_ident
if IS_PY3K:
def dict_keys(d):
return list(d.keys())
def dict_values(d):
return list(d.values())
dict_iter_values = dict.values
def dict_iter_items(d):
return d.items()
def dict_items(d):
return list(d.items())
def as_str(s):
assert isinstance(s, str)
return s
else:
dict_keys = None
try:
dict_keys = dict.keys
except:
pass
if IS_JYTHON or not dict_keys:
def dict_keys(d):
return d.keys()
try:
dict_iter_values = dict.itervalues
except:
try:
dict_iter_values = dict.values # Older versions don't have the itervalues
except:
def dict_iter_values(d):
return d.values()
try:
dict_values = dict.values
except:
def dict_values(d):
return d.values()
def dict_iter_items(d):
try:
return d.iteritems()
except:
return d.items()
def dict_items(d):
return d.items()
def as_str(s):
if isinstance(s, unicode):
return s.encode('utf-8')
return s
def as_str(s):
assert isinstance(s, str)
return s
def silence_warnings_decorator(func):
@ -548,36 +458,22 @@ def silence_warnings_decorator(func):
def sorted_dict_repr(d):
s = sorted(dict_iter_items(d), key=lambda x:str(x[0]))
s = sorted(d.items(), key=lambda x:str(x[0]))
return '{' + ', '.join(('%r: %r' % x) for x in s) + '}'
def iter_chars(b):
# In Python 2, we can iterate bytes or unicode with individual characters, but Python 3 onwards
# changed that behavior so that when iterating bytes we actually get ints!
if not IS_PY2:
if isinstance(b, bytes):
# i.e.: do something as struct.unpack('3c', b)
return iter(struct.unpack(str(len(b)) + 'c', b))
if isinstance(b, bytes):
# i.e.: do something as struct.unpack('3c', b)
return iter(struct.unpack(str(len(b)) + 'c', b))
return iter(b)
try:
xrange = xrange
except:
# Python 3k does not have it
xrange = range
try:
import itertools
izip = itertools.izip
except:
izip = zip
try:
from StringIO import StringIO
except:
from io import StringIO
# Python 3k does not have it
xrange = range
izip = zip
if IS_JYTHON:

View file

@ -1549,6 +1549,29 @@ static CYTHON_INLINE PyObject* __Pyx_dict_iterator(PyObject* dict, int is_dict,
static CYTHON_INLINE int __Pyx_dict_iter_next(PyObject* dict_or_iter, Py_ssize_t orig_length, Py_ssize_t* ppos,
PyObject** pkey, PyObject** pvalue, PyObject** pitem, int is_dict);
/* py_dict_values.proto */
static CYTHON_INLINE PyObject* __Pyx_PyDict_Values(PyObject* d);
/* CallUnboundCMethod0.proto */
static PyObject* __Pyx__CallUnboundCMethod0(__Pyx_CachedCFunction* cfunc, PyObject* self);
#if CYTHON_COMPILING_IN_CPYTHON
#define __Pyx_CallUnboundCMethod0(cfunc, self)\
(likely((cfunc)->func) ?\
(likely((cfunc)->flag == METH_NOARGS) ? (*((cfunc)->func))(self, NULL) :\
(PY_VERSION_HEX >= 0x030600B1 && likely((cfunc)->flag == METH_FASTCALL) ?\
(PY_VERSION_HEX >= 0x030700A0 ?\
(*(__Pyx_PyCFunctionFast)(void*)(PyCFunction)(cfunc)->func)(self, &__pyx_empty_tuple, 0) :\
(*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)(cfunc)->func)(self, &__pyx_empty_tuple, 0, NULL)) :\
(PY_VERSION_HEX >= 0x030700A0 && (cfunc)->flag == (METH_FASTCALL | METH_KEYWORDS) ?\
(*(__Pyx_PyCFunctionFastWithKeywords)(void*)(PyCFunction)(cfunc)->func)(self, &__pyx_empty_tuple, 0, NULL) :\
(likely((cfunc)->flag == (METH_VARARGS | METH_KEYWORDS)) ? ((*(PyCFunctionWithKeywords)(void*)(PyCFunction)(cfunc)->func)(self, __pyx_empty_tuple, NULL)) :\
((cfunc)->flag == METH_VARARGS ? (*((cfunc)->func))(self, __pyx_empty_tuple) :\
__Pyx__CallUnboundCMethod0(cfunc, self)))))) :\
__Pyx__CallUnboundCMethod0(cfunc, self))
#else
#define __Pyx_CallUnboundCMethod0(cfunc, self) __Pyx__CallUnboundCMethod0(cfunc, self)
#endif
/* DictGetItem.proto */
#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY
static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key);
@ -1816,8 +1839,8 @@ static const char __pyx_k_reduce[] = "__reduce__";
static const char __pyx_k_return[] = "return";
static const char __pyx_k_thread[] = "thread";
static const char __pyx_k_update[] = "update";
static const char __pyx_k_values[] = "values";
static const char __pyx_k_writer[] = "writer";
static const char __pyx_k_IS_PY3K[] = "IS_PY3K";
static const char __pyx_k_co_name[] = "co_name";
static const char __pyx_k_compile[] = "compile";
static const char __pyx_k_f_lasti[] = "f_lasti";
@ -1927,7 +1950,6 @@ static const char __pyx_k_pyx_PickleError[] = "__pyx_PickleError";
static const char __pyx_k_setstate_cython[] = "__setstate_cython__";
static const char __pyx_k_trace_exception[] = "trace_exception";
static const char __pyx_k_DEBUG_START_PY3K[] = "DEBUG_START_PY3K";
static const char __pyx_k_dict_iter_values[] = "dict_iter_values";
static const char __pyx_k_in_project_scope[] = "in_project_scope";
static const char __pyx_k_threading_active[] = "threading_active";
static const char __pyx_k_try_except_infos[] = "try_except_infos";
@ -2053,7 +2075,6 @@ static PyObject *__pyx_n_s_EXCEPTION_TYPE_USER_UNHANDLED;
static PyObject *__pyx_n_s_ForkSafeLock;
static PyObject *__pyx_n_s_GeneratorExit;
static PyObject *__pyx_n_s_IGNORE_EXCEPTION_TAG;
static PyObject *__pyx_n_s_IS_PY3K;
static PyObject *__pyx_kp_s_IgnoreException;
static PyObject *__pyx_kp_s_Ignore_exception_s_in_library_s;
static PyObject *__pyx_n_s_ImportError;
@ -2135,7 +2156,6 @@ static PyObject *__pyx_n_s_constructed_tid_to_last_frame;
static PyObject *__pyx_n_s_current_frames;
static PyObject *__pyx_n_s_debug;
static PyObject *__pyx_n_s_dict;
static PyObject *__pyx_n_s_dict_iter_values;
static PyObject *__pyx_n_s_dis;
static PyObject *__pyx_n_s_disable_tracing;
static PyObject *__pyx_n_s_do_wait_suspend;
@ -2332,6 +2352,7 @@ static PyObject *__pyx_n_s_try_exc_info;
static PyObject *__pyx_n_s_try_except_infos;
static PyObject *__pyx_n_s_update;
static PyObject *__pyx_kp_s_utf_8;
static PyObject *__pyx_n_s_values;
static PyObject *__pyx_n_s_version;
static PyObject *__pyx_n_s_writer;
static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo___init__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */
@ -2481,6 +2502,7 @@ static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTra
static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_ThreadTracer(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
static __Pyx_CachedCFunction __pyx_umethod_PyDict_Type_get = {0, &__pyx_n_s_get, 0, 0, 0};
static __Pyx_CachedCFunction __pyx_umethod_PyDict_Type_update = {0, &__pyx_n_s_update, 0, 0, 0};
static __Pyx_CachedCFunction __pyx_umethod_PyDict_Type_values = {0, &__pyx_n_s_values, 0, 0, 0};
static __Pyx_CachedCFunction __pyx_umethod_PyString_Type_rfind = {0, &__pyx_n_s_rfind, 0, 0, 0};
static PyObject *__pyx_int_0;
static PyObject *__pyx_int_1;
@ -17316,7 +17338,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
* if curr_func_name in ('?', '<module>', '<lambda>'):
* curr_func_name = '' # <<<<<<<<<<<<<<
*
* for bp in dict_iter_values(breakpoints_for_file): # jython does not support itervalues()
* for bp in breakpoints_for_file.values():
*/
__Pyx_INCREF(__pyx_kp_s_);
__Pyx_DECREF_SET(__pyx_v_curr_func_name, __pyx_kp_s_);
@ -17333,27 +17355,16 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
/* "_pydevd_bundle/pydevd_cython.pyx":958
* curr_func_name = ''
*
* for bp in dict_iter_values(breakpoints_for_file): # jython does not support itervalues() # <<<<<<<<<<<<<<
* for bp in breakpoints_for_file.values(): # <<<<<<<<<<<<<<
* # will match either global or some function
* if bp.func_name in ('None', curr_func_name):
*/
__Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_dict_iter_values); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 958, __pyx_L74_except_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_8 = NULL;
if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {
__pyx_t_8 = PyMethod_GET_SELF(__pyx_t_3);
if (likely(__pyx_t_8)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);
__Pyx_INCREF(__pyx_t_8);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_3, function);
}
if (unlikely(__pyx_v_breakpoints_for_file == Py_None)) {
PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%.30s'", "values");
__PYX_ERR(0, 958, __pyx_L74_except_error)
}
__pyx_t_6 = (__pyx_t_8) ? __Pyx_PyObject_Call2Args(__pyx_t_3, __pyx_t_8, __pyx_v_breakpoints_for_file) : __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_breakpoints_for_file);
__Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 958, __pyx_L74_except_error)
__pyx_t_6 = __Pyx_PyDict_Values(__pyx_v_breakpoints_for_file); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 958, __pyx_L74_except_error)
__Pyx_GOTREF(__pyx_t_6);
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
if (likely(PyList_CheckExact(__pyx_t_6)) || PyTuple_CheckExact(__pyx_t_6)) {
__pyx_t_3 = __pyx_t_6; __Pyx_INCREF(__pyx_t_3); __pyx_t_19 = 0;
__pyx_t_12 = NULL;
@ -17398,7 +17409,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
__pyx_t_6 = 0;
/* "_pydevd_bundle/pydevd_cython.pyx":960
* for bp in dict_iter_values(breakpoints_for_file): # jython does not support itervalues()
* for bp in breakpoints_for_file.values():
* # will match either global or some function
* if bp.func_name in ('None', curr_func_name): # <<<<<<<<<<<<<<
* has_breakpoint_in_frame = True
@ -17438,7 +17449,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
goto __pyx_L90_break;
/* "_pydevd_bundle/pydevd_cython.pyx":960
* for bp in dict_iter_values(breakpoints_for_file): # jython does not support itervalues()
* for bp in breakpoints_for_file.values():
* # will match either global or some function
* if bp.func_name in ('None', curr_func_name): # <<<<<<<<<<<<<<
* has_breakpoint_in_frame = True
@ -17449,7 +17460,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
/* "_pydevd_bundle/pydevd_cython.pyx":958
* curr_func_name = ''
*
* for bp in dict_iter_values(breakpoints_for_file): # jython does not support itervalues() # <<<<<<<<<<<<<<
* for bp in breakpoints_for_file.values(): # <<<<<<<<<<<<<<
* # will match either global or some function
* if bp.func_name in ('None', curr_func_name):
*/
@ -21573,7 +21584,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
* else:
* stop = False # <<<<<<<<<<<<<<
*
* if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"):
* if stop and step_cmd != -1 and is_return and hasattr(frame, "f_back"):
*/
/*else*/ {
__pyx_v_stop = 0;
@ -21583,7 +21594,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
/* "_pydevd_bundle/pydevd_cython.pyx":1242
* stop = False
*
* if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"): # <<<<<<<<<<<<<<
* if stop and step_cmd != -1 and is_return and hasattr(frame, "f_back"): # <<<<<<<<<<<<<<
* f_code = getattr(frame.f_back, 'f_code', None)
* if f_code is not None:
*/
@ -21605,15 +21616,6 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
__pyx_t_14 = __pyx_t_9;
goto __pyx_L236_bool_binop_done;
}
__Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_IS_PY3K); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1242, __pyx_L166_error)
__Pyx_GOTREF(__pyx_t_3);
__pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 1242, __pyx_L166_error)
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
if (__pyx_t_9) {
} else {
__pyx_t_14 = __pyx_t_9;
goto __pyx_L236_bool_binop_done;
}
__pyx_t_9 = __Pyx_HasAttr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(__pyx_t_9 == ((int)-1))) __PYX_ERR(0, 1242, __pyx_L166_error)
__pyx_t_10 = (__pyx_t_9 != 0);
__pyx_t_14 = __pyx_t_10;
@ -21622,7 +21624,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
/* "_pydevd_bundle/pydevd_cython.pyx":1243
*
* if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"):
* if stop and step_cmd != -1 and is_return and hasattr(frame, "f_back"):
* f_code = getattr(frame.f_back, 'f_code', None) # <<<<<<<<<<<<<<
* if f_code is not None:
* if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE:
@ -21636,7 +21638,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
__pyx_t_6 = 0;
/* "_pydevd_bundle/pydevd_cython.pyx":1244
* if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"):
* if stop and step_cmd != -1 and is_return and hasattr(frame, "f_back"):
* f_code = getattr(frame.f_back, 'f_code', None)
* if f_code is not None: # <<<<<<<<<<<<<<
* if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE:
@ -21701,7 +21703,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
}
/* "_pydevd_bundle/pydevd_cython.pyx":1244
* if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"):
* if stop and step_cmd != -1 and is_return and hasattr(frame, "f_back"):
* f_code = getattr(frame.f_back, 'f_code', None)
* if f_code is not None: # <<<<<<<<<<<<<<
* if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE:
@ -21712,7 +21714,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
/* "_pydevd_bundle/pydevd_cython.pyx":1242
* stop = False
*
* if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"): # <<<<<<<<<<<<<<
* if stop and step_cmd != -1 and is_return and hasattr(frame, "f_back"): # <<<<<<<<<<<<<<
* f_code = getattr(frame.f_back, 'f_code', None)
* if f_code is not None:
*/
@ -21811,7 +21813,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
* stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd)
* elif stop:
*/
goto __pyx_L243;
goto __pyx_L242;
}
/* "_pydevd_bundle/pydevd_cython.pyx":1250
@ -21935,7 +21937,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
* self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd)
* self.do_wait_suspend(thread, frame, event, arg)
*/
goto __pyx_L244;
goto __pyx_L243;
}
/* "_pydevd_bundle/pydevd_cython.pyx":1254
@ -22031,22 +22033,22 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
__Pyx_GOTREF(__pyx_t_8);
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
__pyx_t_13 = Py_TYPE(__pyx_t_8)->tp_iternext;
index = 0; __pyx_t_3 = __pyx_t_13(__pyx_t_8); if (unlikely(!__pyx_t_3)) goto __pyx_L246_unpacking_failed;
index = 0; __pyx_t_3 = __pyx_t_13(__pyx_t_8); if (unlikely(!__pyx_t_3)) goto __pyx_L245_unpacking_failed;
__Pyx_GOTREF(__pyx_t_3);
index = 1; __pyx_t_4 = __pyx_t_13(__pyx_t_8); if (unlikely(!__pyx_t_4)) goto __pyx_L246_unpacking_failed;
index = 1; __pyx_t_4 = __pyx_t_13(__pyx_t_8); if (unlikely(!__pyx_t_4)) goto __pyx_L245_unpacking_failed;
__Pyx_GOTREF(__pyx_t_4);
index = 2; __pyx_t_7 = __pyx_t_13(__pyx_t_8); if (unlikely(!__pyx_t_7)) goto __pyx_L246_unpacking_failed;
index = 2; __pyx_t_7 = __pyx_t_13(__pyx_t_8); if (unlikely(!__pyx_t_7)) goto __pyx_L245_unpacking_failed;
__Pyx_GOTREF(__pyx_t_7);
if (__Pyx_IternextUnpackEndCheck(__pyx_t_13(__pyx_t_8), 3) < 0) __PYX_ERR(0, 1260, __pyx_L166_error)
__pyx_t_13 = NULL;
__Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
goto __pyx_L247_unpacking_done;
__pyx_L246_unpacking_failed:;
goto __pyx_L246_unpacking_done;
__pyx_L245_unpacking_failed:;
__Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
__pyx_t_13 = NULL;
if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index);
__PYX_ERR(0, 1260, __pyx_L166_error)
__pyx_L247_unpacking_done:;
__pyx_L246_unpacking_done:;
}
__pyx_v_back_absolute_filename = __pyx_t_3;
__pyx_t_3 = 0;
@ -22084,7 +22086,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
if (!__pyx_t_10) {
} else {
__pyx_t_14 = __pyx_t_10;
goto __pyx_L249_bool_binop_done;
goto __pyx_L248_bool_binop_done;
}
__Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_DEBUG_START_PY3K); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1261, __pyx_L166_error)
__Pyx_GOTREF(__pyx_t_4);
@ -22093,7 +22095,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
__pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 1261, __pyx_L166_error)
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
__pyx_t_14 = __pyx_t_10;
__pyx_L249_bool_binop_done:;
__pyx_L248_bool_binop_done:;
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
__pyx_t_10 = (__pyx_t_14 != 0);
if (__pyx_t_10) {
@ -22115,7 +22117,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
* back = None
*
*/
goto __pyx_L248;
goto __pyx_L247;
}
/* "_pydevd_bundle/pydevd_cython.pyx":1264
@ -22306,7 +22308,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
* # In this case, we'll have to skip the previous one because it shouldn't be traced.
*/
}
__pyx_L248:;
__pyx_L247:;
/* "_pydevd_bundle/pydevd_cython.pyx":1256
* elif is_return: # return event
@ -22429,7 +22431,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
* # if we're in a return, we want it to appear to the user in the previous frame!
* self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd)
*/
goto __pyx_L252;
goto __pyx_L251;
}
/* "_pydevd_bundle/pydevd_cython.pyx":1285
@ -22473,7 +22475,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
*/
__pyx_v_info->pydev_state = 1;
}
__pyx_L252:;
__pyx_L251:;
/* "_pydevd_bundle/pydevd_cython.pyx":1254
* self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd)
@ -22483,7 +22485,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
* if back is not None:
*/
}
__pyx_L244:;
__pyx_L243:;
/* "_pydevd_bundle/pydevd_cython.pyx":1250
* if plugin_stop:
@ -22493,7 +22495,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
* self.set_suspend(thread, step_cmd, original_step_cmd=info.pydev_original_step_cmd)
*/
}
__pyx_L243:;
__pyx_L242:;
/* "_pydevd_bundle/pydevd_cython.pyx":1093
*
@ -22584,9 +22586,9 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
* info.pydev_original_step_cmd = -1
* info.pydev_step_cmd = -1
*/
__Pyx_GetModuleGlobalName(__pyx_t_8, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1294, __pyx_L257_error)
__Pyx_GetModuleGlobalName(__pyx_t_8, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 1294, __pyx_L256_error)
__Pyx_GOTREF(__pyx_t_8);
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_exception); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1294, __pyx_L257_error)
__pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_exception); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1294, __pyx_L256_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
__pyx_t_8 = NULL;
@ -22601,7 +22603,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
}
__pyx_t_3 = (__pyx_t_8) ? __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_8) : __Pyx_PyObject_CallNoArg(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1294, __pyx_L257_error)
if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1294, __pyx_L256_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
@ -22648,8 +22650,8 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
__Pyx_XDECREF(__pyx_t_28); __pyx_t_28 = 0;
__Pyx_XDECREF(__pyx_t_27); __pyx_t_27 = 0;
__Pyx_XDECREF(__pyx_t_26); __pyx_t_26 = 0;
goto __pyx_L264_try_end;
__pyx_L257_error:;
goto __pyx_L263_try_end;
__pyx_L256_error:;
__Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
__Pyx_XDECREF(__pyx_t_21); __pyx_t_21 = 0;
@ -22665,7 +22667,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
*/
/*except:*/ {
__Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename);
if (__Pyx_GetException(&__pyx_t_3, &__pyx_t_1, &__pyx_t_8) < 0) __PYX_ERR(0, 1298, __pyx_L259_except_error)
if (__Pyx_GetException(&__pyx_t_3, &__pyx_t_1, &__pyx_t_8) < 0) __PYX_ERR(0, 1298, __pyx_L258_except_error)
__Pyx_GOTREF(__pyx_t_3);
__Pyx_GOTREF(__pyx_t_1);
__Pyx_GOTREF(__pyx_t_8);
@ -22682,7 +22684,7 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
__Pyx_INCREF(Py_None);
__pyx_t_2 = Py_None;
} else {
__Pyx_GetModuleGlobalName(__pyx_t_30, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_30)) __PYX_ERR(0, 1299, __pyx_L259_except_error)
__Pyx_GetModuleGlobalName(__pyx_t_30, __pyx_n_s_NO_FTRACE); if (unlikely(!__pyx_t_30)) __PYX_ERR(0, 1299, __pyx_L258_except_error)
__Pyx_GOTREF(__pyx_t_30);
__pyx_t_2 = __pyx_t_30;
__pyx_t_30 = 0;
@ -22695,9 +22697,9 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
__Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;
goto __pyx_L260_except_return;
goto __pyx_L259_except_return;
}
__pyx_L259_except_error:;
__pyx_L258_except_error:;
/* "_pydevd_bundle/pydevd_cython.pyx":1293
* raise
@ -22711,13 +22713,13 @@ static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispa
__Pyx_XGIVEREF(__pyx_t_26);
__Pyx_ExceptionReset(__pyx_t_28, __pyx_t_27, __pyx_t_26);
goto __pyx_L168_except_error;
__pyx_L260_except_return:;
__pyx_L259_except_return:;
__Pyx_XGIVEREF(__pyx_t_28);
__Pyx_XGIVEREF(__pyx_t_27);
__Pyx_XGIVEREF(__pyx_t_26);
__Pyx_ExceptionReset(__pyx_t_28, __pyx_t_27, __pyx_t_26);
goto __pyx_L169_except_return;
__pyx_L264_try_end:;
__pyx_L263_try_end:;
}
__Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
__Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
@ -36225,7 +36227,6 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = {
{&__pyx_n_s_ForkSafeLock, __pyx_k_ForkSafeLock, sizeof(__pyx_k_ForkSafeLock), 0, 0, 1, 1},
{&__pyx_n_s_GeneratorExit, __pyx_k_GeneratorExit, sizeof(__pyx_k_GeneratorExit), 0, 0, 1, 1},
{&__pyx_n_s_IGNORE_EXCEPTION_TAG, __pyx_k_IGNORE_EXCEPTION_TAG, sizeof(__pyx_k_IGNORE_EXCEPTION_TAG), 0, 0, 1, 1},
{&__pyx_n_s_IS_PY3K, __pyx_k_IS_PY3K, sizeof(__pyx_k_IS_PY3K), 0, 0, 1, 1},
{&__pyx_kp_s_IgnoreException, __pyx_k_IgnoreException, sizeof(__pyx_k_IgnoreException), 0, 0, 1, 0},
{&__pyx_kp_s_Ignore_exception_s_in_library_s, __pyx_k_Ignore_exception_s_in_library_s, sizeof(__pyx_k_Ignore_exception_s_in_library_s), 0, 0, 1, 0},
{&__pyx_n_s_ImportError, __pyx_k_ImportError, sizeof(__pyx_k_ImportError), 0, 0, 1, 1},
@ -36307,7 +36308,6 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = {
{&__pyx_n_s_current_frames, __pyx_k_current_frames, sizeof(__pyx_k_current_frames), 0, 0, 1, 1},
{&__pyx_n_s_debug, __pyx_k_debug, sizeof(__pyx_k_debug), 0, 0, 1, 1},
{&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1},
{&__pyx_n_s_dict_iter_values, __pyx_k_dict_iter_values, sizeof(__pyx_k_dict_iter_values), 0, 0, 1, 1},
{&__pyx_n_s_dis, __pyx_k_dis, sizeof(__pyx_k_dis), 0, 0, 1, 1},
{&__pyx_n_s_disable_tracing, __pyx_k_disable_tracing, sizeof(__pyx_k_disable_tracing), 0, 0, 1, 1},
{&__pyx_n_s_do_wait_suspend, __pyx_k_do_wait_suspend, sizeof(__pyx_k_do_wait_suspend), 0, 0, 1, 1},
@ -36504,6 +36504,7 @@ static __Pyx_StringTabEntry __pyx_string_tab[] = {
{&__pyx_n_s_try_except_infos, __pyx_k_try_except_infos, sizeof(__pyx_k_try_except_infos), 0, 0, 1, 1},
{&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1},
{&__pyx_kp_s_utf_8, __pyx_k_utf_8, sizeof(__pyx_k_utf_8), 0, 0, 1, 0},
{&__pyx_n_s_values, __pyx_k_values, sizeof(__pyx_k_values), 0, 0, 1, 1},
{&__pyx_n_s_version, __pyx_k_version, sizeof(__pyx_k_version), 0, 0, 1, 1},
{&__pyx_n_s_writer, __pyx_k_writer, sizeof(__pyx_k_writer), 0, 0, 1, 1},
{0, 0, 0, 0, 0, 0, 0}
@ -36707,6 +36708,7 @@ static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) {
static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) {
__pyx_umethod_PyDict_Type_get.type = (PyObject*)&PyDict_Type;
__pyx_umethod_PyDict_Type_update.type = (PyObject*)&PyDict_Type;
__pyx_umethod_PyDict_Type_values.type = (PyObject*)&PyDict_Type;
__pyx_umethod_PyString_Type_rfind.type = (PyObject*)&PyString_Type;
if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
__pyx_int_0 = PyInt_FromLong(0); if (unlikely(!__pyx_int_0)) __PYX_ERR(0, 1, __pyx_L1_error)
@ -37279,7 +37281,7 @@ if (!__Pyx_RefNanny) {
*
* from _pydev_bundle import pydev_log # <<<<<<<<<<<<<<
* from _pydevd_bundle import pydevd_dont_trace
* from _pydevd_bundle.pydevd_constants import (dict_iter_values, IS_PY3K, RETURN_VALUES_DICT, NO_FTRACE,
* from _pydevd_bundle.pydevd_constants import (RETURN_VALUES_DICT, NO_FTRACE,
*/
__pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 150, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
@ -37299,7 +37301,7 @@ if (!__Pyx_RefNanny) {
*
* from _pydev_bundle import pydev_log
* from _pydevd_bundle import pydevd_dont_trace # <<<<<<<<<<<<<<
* from _pydevd_bundle.pydevd_constants import (dict_iter_values, IS_PY3K, RETURN_VALUES_DICT, NO_FTRACE,
* from _pydevd_bundle.pydevd_constants import (RETURN_VALUES_DICT, NO_FTRACE,
* EXCEPTION_TYPE_HANDLED, EXCEPTION_TYPE_USER_UNHANDLED)
*/
__pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 151, __pyx_L1_error)
@ -37319,41 +37321,27 @@ if (!__Pyx_RefNanny) {
/* "_pydevd_bundle/pydevd_cython.pyx":152
* from _pydev_bundle import pydev_log
* from _pydevd_bundle import pydevd_dont_trace
* from _pydevd_bundle.pydevd_constants import (dict_iter_values, IS_PY3K, RETURN_VALUES_DICT, NO_FTRACE, # <<<<<<<<<<<<<<
* from _pydevd_bundle.pydevd_constants import (RETURN_VALUES_DICT, NO_FTRACE, # <<<<<<<<<<<<<<
* EXCEPTION_TYPE_HANDLED, EXCEPTION_TYPE_USER_UNHANDLED)
* from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised, remove_exception_from_frame, ignore_exception_trace
*/
__pyx_t_2 = PyList_New(6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 152, __pyx_L1_error)
__pyx_t_2 = PyList_New(4); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 152, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_INCREF(__pyx_n_s_dict_iter_values);
__Pyx_GIVEREF(__pyx_n_s_dict_iter_values);
PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_dict_iter_values);
__Pyx_INCREF(__pyx_n_s_IS_PY3K);
__Pyx_GIVEREF(__pyx_n_s_IS_PY3K);
PyList_SET_ITEM(__pyx_t_2, 1, __pyx_n_s_IS_PY3K);
__Pyx_INCREF(__pyx_n_s_RETURN_VALUES_DICT);
__Pyx_GIVEREF(__pyx_n_s_RETURN_VALUES_DICT);
PyList_SET_ITEM(__pyx_t_2, 2, __pyx_n_s_RETURN_VALUES_DICT);
PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_RETURN_VALUES_DICT);
__Pyx_INCREF(__pyx_n_s_NO_FTRACE);
__Pyx_GIVEREF(__pyx_n_s_NO_FTRACE);
PyList_SET_ITEM(__pyx_t_2, 3, __pyx_n_s_NO_FTRACE);
PyList_SET_ITEM(__pyx_t_2, 1, __pyx_n_s_NO_FTRACE);
__Pyx_INCREF(__pyx_n_s_EXCEPTION_TYPE_HANDLED);
__Pyx_GIVEREF(__pyx_n_s_EXCEPTION_TYPE_HANDLED);
PyList_SET_ITEM(__pyx_t_2, 4, __pyx_n_s_EXCEPTION_TYPE_HANDLED);
PyList_SET_ITEM(__pyx_t_2, 2, __pyx_n_s_EXCEPTION_TYPE_HANDLED);
__Pyx_INCREF(__pyx_n_s_EXCEPTION_TYPE_USER_UNHANDLED);
__Pyx_GIVEREF(__pyx_n_s_EXCEPTION_TYPE_USER_UNHANDLED);
PyList_SET_ITEM(__pyx_t_2, 5, __pyx_n_s_EXCEPTION_TYPE_USER_UNHANDLED);
PyList_SET_ITEM(__pyx_t_2, 3, __pyx_n_s_EXCEPTION_TYPE_USER_UNHANDLED);
__pyx_t_1 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_constants, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 152, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_dict_iter_values); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 152, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
if (PyDict_SetItem(__pyx_d, __pyx_n_s_dict_iter_values, __pyx_t_2) < 0) __PYX_ERR(0, 152, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_IS_PY3K); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 152, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
if (PyDict_SetItem(__pyx_d, __pyx_n_s_IS_PY3K, __pyx_t_2) < 0) __PYX_ERR(0, 152, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_RETURN_VALUES_DICT); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 152, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
if (PyDict_SetItem(__pyx_d, __pyx_n_s_RETURN_VALUES_DICT, __pyx_t_2) < 0) __PYX_ERR(0, 152, __pyx_L1_error)
@ -37373,7 +37361,7 @@ if (!__Pyx_RefNanny) {
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
/* "_pydevd_bundle/pydevd_cython.pyx":154
* from _pydevd_bundle.pydevd_constants import (dict_iter_values, IS_PY3K, RETURN_VALUES_DICT, NO_FTRACE,
* from _pydevd_bundle.pydevd_constants import (RETURN_VALUES_DICT, NO_FTRACE,
* EXCEPTION_TYPE_HANDLED, EXCEPTION_TYPE_USER_UNHANDLED)
* from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised, remove_exception_from_frame, ignore_exception_trace # <<<<<<<<<<<<<<
* from _pydevd_bundle.pydevd_utils import get_clsname_for_code
@ -40144,6 +40132,33 @@ static CYTHON_INLINE int __Pyx_dict_iter_next(
return 1;
}
/* CallUnboundCMethod0 */
/* Slow path for __Pyx_CallUnboundCMethod0: call the cached unbound C method
 * `cfunc` with `self` as its only argument via a regular 1-tuple call.
 * Returns a new reference, or NULL with an exception set on failure.
 * NOTE(review): Cython-generated utility code — keep byte-compatible with the
 * generator's output rather than hand-tuning it. */
static PyObject* __Pyx__CallUnboundCMethod0(__Pyx_CachedCFunction* cfunc, PyObject* self) {
PyObject *args, *result = NULL;
/* Lazily resolve the bound-method slot on first use; bail out on failure. */
if (unlikely(!cfunc->method) && unlikely(__Pyx_TryUnpackUnboundCMethod(cfunc) < 0)) return NULL;
#if CYTHON_ASSUME_SAFE_MACROS
/* Fast tuple construction: SET_ITEM steals the reference, so INCREF first. */
args = PyTuple_New(1);
if (unlikely(!args)) goto bad;
Py_INCREF(self);
PyTuple_SET_ITEM(args, 0, self);
#else
/* Portable path: PyTuple_Pack takes its own reference to self. */
args = PyTuple_Pack(1, self);
if (unlikely(!args)) goto bad;
#endif
result = __Pyx_PyObject_Call(cfunc->method, args, NULL);
Py_DECREF(args);
bad:
/* result is still NULL here if tuple creation or the call failed. */
return result;
}
/* py_dict_values */
static CYTHON_INLINE PyObject* __Pyx_PyDict_Values(PyObject* d) {
if (PY_MAJOR_VERSION >= 3)
return __Pyx_CallUnboundCMethod0(&__pyx_umethod_PyDict_Type_values, d);
else
return PyDict_Values(d);
}
/* DictGetItem */
#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY
static PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) {

View file

@ -149,7 +149,7 @@ import re
from _pydev_bundle import pydev_log
from _pydevd_bundle import pydevd_dont_trace
from _pydevd_bundle.pydevd_constants import (dict_iter_values, IS_PY3K, RETURN_VALUES_DICT, NO_FTRACE,
from _pydevd_bundle.pydevd_constants import (RETURN_VALUES_DICT, NO_FTRACE,
EXCEPTION_TYPE_HANDLED, EXCEPTION_TYPE_USER_UNHANDLED)
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised, remove_exception_from_frame, ignore_exception_trace
from _pydevd_bundle.pydevd_utils import get_clsname_for_code
@ -955,7 +955,7 @@ cdef class PyDBFrame:
if curr_func_name in ('?', '<module>', '<lambda>'):
curr_func_name = ''
for bp in dict_iter_values(breakpoints_for_file): # jython does not support itervalues()
for bp in breakpoints_for_file.values():
# will match either global or some function
if bp.func_name in ('None', curr_func_name):
has_breakpoint_in_frame = True
@ -1239,7 +1239,7 @@ cdef class PyDBFrame:
else:
stop = False
if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"):
if stop and step_cmd != -1 and is_return and hasattr(frame, "f_back"):
f_code = getattr(frame.f_back, 'f_code', None)
if f_code is not None:
if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE:

View file

@ -3,8 +3,6 @@
# DO NOT edit manually!
# DO NOT edit manually!
from _pydevd_bundle.pydevd_constants import IS_PY3K
LIB_FILE = 1
PYDEV_FILE = 2
@ -36,23 +34,17 @@ DONT_TRACE = {
# things from pydev that we don't want to trace
'_pydev_execfile.py':PYDEV_FILE,
'__main__pydevd_gen_debug_adapter_protocol.py': PYDEV_FILE,
'_pydev_BaseHTTPServer.py': PYDEV_FILE,
'_pydev_SimpleXMLRPCServer.py': PYDEV_FILE,
'_pydev_SocketServer.py': PYDEV_FILE,
'_pydev_calltip_util.py': PYDEV_FILE,
'_pydev_completer.py': PYDEV_FILE,
'_pydev_execfile.py': PYDEV_FILE,
'_pydev_filesystem_encoding.py': PYDEV_FILE,
'_pydev_getopt.py': PYDEV_FILE,
'_pydev_imports_tipper.py': PYDEV_FILE,
'_pydev_inspect.py': PYDEV_FILE,
'_pydev_jy_imports_tipper.py': PYDEV_FILE,
'_pydev_log.py': PYDEV_FILE,
'_pydev_pkgutil_old.py': PYDEV_FILE,
'_pydev_saved_modules.py': PYDEV_FILE,
'_pydev_sys_patch.py': PYDEV_FILE,
'_pydev_tipper_common.py': PYDEV_FILE,
'_pydev_xmlrpclib.py': PYDEV_FILE,
'django_debug.py': PYDEV_FILE,
'jinja2_debug.py': PYDEV_FILE,
'pycompletionserver.py': PYDEV_FILE,
@ -102,7 +94,6 @@ DONT_TRACE = {
'pydevd_defaults.py': PYDEV_FILE,
'pydevd_dont_trace.py': PYDEV_FILE,
'pydevd_dont_trace_files.py': PYDEV_FILE,
'pydevd_exec.py': PYDEV_FILE,
'pydevd_exec2.py': PYDEV_FILE,
'pydevd_extension_api.py': PYDEV_FILE,
'pydevd_extension_utils.py': PYDEV_FILE,
@ -155,11 +146,10 @@ DONT_TRACE = {
'scandir_vendored.py': PYDEV_FILE,
}
if IS_PY3K:
# if we try to trace io.py it seems it can get halted (see http://bugs.python.org/issue4716)
DONT_TRACE['io.py'] = LIB_FILE
# if we try to trace io.py it seems it can get halted (see http://bugs.python.org/issue4716)
DONT_TRACE['io.py'] = LIB_FILE
# Don't trace common encodings too
DONT_TRACE['cp1252.py'] = LIB_FILE
DONT_TRACE['utf_8.py'] = LIB_FILE
DONT_TRACE['codecs.py'] = LIB_FILE
# Don't trace common encodings too
DONT_TRACE['cp1252.py'] = LIB_FILE
DONT_TRACE['utf_8.py'] = LIB_FILE
DONT_TRACE['codecs.py'] = LIB_FILE

View file

@ -1,5 +0,0 @@
# NOTE(review): this is the full content of pydevd_exec.py, which this commit
# DELETES (hunk header `@ -1,5 +0,0 @` above) — it uses the Python 2-only
# `exec ... in ...` statement syntax and cannot be parsed by Python 3.
# Its py3 twin pydevd_exec2.py (exec() builtin) remains; do not resurrect this.
def Exec(exp, global_vars, local_vars=None):
if local_vars is not None:
exec exp in global_vars, local_vars
else:
exec exp in global_vars

View file

@ -4,7 +4,7 @@ import re
from _pydev_bundle import pydev_log
from _pydevd_bundle import pydevd_dont_trace
from _pydevd_bundle.pydevd_constants import (dict_iter_values, IS_PY3K, RETURN_VALUES_DICT, NO_FTRACE,
from _pydevd_bundle.pydevd_constants import (RETURN_VALUES_DICT, NO_FTRACE,
EXCEPTION_TYPE_HANDLED, EXCEPTION_TYPE_USER_UNHANDLED)
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised, remove_exception_from_frame, ignore_exception_trace
from _pydevd_bundle.pydevd_utils import get_clsname_for_code
@ -822,7 +822,7 @@ class PyDBFrame:
if curr_func_name in ('?', '<module>', '<lambda>'):
curr_func_name = ''
for bp in dict_iter_values(breakpoints_for_file): # jython does not support itervalues()
for bp in breakpoints_for_file.values():
# will match either global or some function
if bp.func_name in ('None', curr_func_name):
has_breakpoint_in_frame = True
@ -1106,7 +1106,7 @@ class PyDBFrame:
else:
stop = False
if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"):
if stop and step_cmd != -1 and is_return and hasattr(frame, "f_back"):
f_code = getattr(frame.f_back, 'f_code', None)
if f_code is not None:
if main_debugger.get_file_type(frame.f_back) == main_debugger.PYDEV_FILE:

View file

@ -1,4 +1,4 @@
from _pydevd_bundle.pydevd_constants import ForkSafeLock, get_global_debugger, IS_PY2
from _pydevd_bundle.pydevd_constants import ForkSafeLock, get_global_debugger
import os
import sys
from contextlib import contextmanager
@ -100,18 +100,9 @@ class RedirectToPyDBIoMessages(object):
return
if s:
if IS_PY2:
# Need s in utf-8 bytes
if isinstance(s, unicode): # noqa
# Note: python 2.6 does not accept the "errors" keyword.
s = s.encode('utf-8', 'replace')
else:
s = s.decode(self.encoding, 'replace').encode('utf-8', 'replace')
else:
# Need s in str
if isinstance(s, bytes):
s = s.decode(self.encoding, errors='replace')
# Need s in str
if isinstance(s, bytes):
s = s.decode(self.encoding, errors='replace')
py_db = self.get_pydb()
if py_db is not None:
@ -139,13 +130,8 @@ class IOBuf:
return ''.join(b) # bytes on py2, str on py3.
def write(self, s):
if IS_PY2:
if isinstance(s, unicode):
# can't use 'errors' as kwargs in py 2.6
s = s.encode(self.encoding, 'replace')
else:
if isinstance(s, bytes):
s = s.decode(self.encoding, errors='replace')
if isinstance(s, bytes):
s = s.decode(self.encoding, errors='replace')
self.buflist.append(s)
def isatty(self):
@ -192,7 +178,7 @@ def start_redirect(keep_original_redirection=False, std='stdout', redirect_to=No
stack = getattr(_RedirectionsHolder, '_stack_%s' % std)
if keep_original_redirection:
wrap_buffer = True if not IS_PY2 and hasattr(redirect_to, 'buffer') else False
wrap_buffer = True if hasattr(redirect_to, 'buffer') else False
new_std_instance = IORedirector(getattr(sys, std), redirect_to, wrap_buffer=wrap_buffer)
setattr(sys, std, new_std_instance)
else:
@ -224,7 +210,7 @@ def redirect_stream_to_pydb_io_messages(std):
with _RedirectionsHolder._lock:
redirect_to_name = '_pydevd_%s_redirect_' % (std,)
if getattr(_RedirectionsHolder, redirect_to_name) is None:
wrap_buffer = True if not IS_PY2 else False
wrap_buffer = True
original = getattr(sys, std)
redirect_to = RedirectToPyDBIoMessages(1 if std == 'stdout' else 2, original, wrap_buffer)

View file

@ -1,4 +1,4 @@
from _pydevd_bundle.pydevd_constants import DebugInfoHolder, IS_PY2, \
from _pydevd_bundle.pydevd_constants import DebugInfoHolder, \
get_global_debugger, GetGlobalDebugger, set_global_debugger # Keep for backward compatibility @UnusedImport
from _pydevd_bundle.pydevd_utils import quote_smart as quote, to_string
from _pydevd_bundle.pydevd_comm_constants import ID_TO_MEANING, CMD_EXIT
@ -72,13 +72,7 @@ class NetCommand(_BaseNetCommand):
self.as_dict = as_dict
text = json.dumps(as_dict)
if IS_PY2:
if isinstance(text, unicode):
text = text.encode('utf-8')
else:
assert isinstance(text, str)
else:
assert isinstance(text, str)
assert isinstance(text, str)
if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 1:
self._show_debug_info(cmd_id, seq, text)
@ -93,15 +87,11 @@ class NetCommand(_BaseNetCommand):
else:
msg = '%s\t%s\t%s' % (cmd_id, seq, text)
if IS_PY2:
assert isinstance(msg, str) # i.e.: bytes
as_bytes = msg
else:
if isinstance(msg, str):
msg = msg.encode('utf-8')
if isinstance(msg, str):
msg = msg.encode('utf-8')
assert isinstance(msg, bytes)
as_bytes = msg
assert isinstance(msg, bytes)
as_bytes = msg
self._as_bytes = as_bytes
def send(self, sock):

View file

@ -16,7 +16,7 @@ from _pydevd_bundle.pydevd_comm_constants import CMD_THREAD_CREATE, CMD_RETURN,
CMD_THREAD_RESUME_SINGLE_NOTIFICATION, CMD_THREAD_KILL, CMD_STOP_ON_START, CMD_INPUT_REQUESTED, \
CMD_EXIT, CMD_STEP_INTO_COROUTINE, CMD_STEP_RETURN_MY_CODE, CMD_SMART_STEP_INTO, \
CMD_SET_FUNCTION_BREAK
from _pydevd_bundle.pydevd_constants import get_thread_id, dict_values, ForkSafeLock
from _pydevd_bundle.pydevd_constants import get_thread_id, ForkSafeLock
from _pydevd_bundle.pydevd_net_command import NetCommand, NULL_NET_COMMAND
from _pydevd_bundle.pydevd_net_command_factory_xml import NetCommandFactory
from _pydevd_bundle.pydevd_utils import get_non_pydevd_threads
@ -26,11 +26,7 @@ from _pydevd_bundle.pydevd_additional_thread_info import set_additional_thread_i
from _pydevd_bundle import pydevd_frame_utils, pydevd_constants, pydevd_utils
import linecache
from _pydevd_bundle.pydevd_thread_lifecycle import pydevd_find_thread_by_id
try:
from StringIO import StringIO
except:
from io import StringIO
from io import StringIO
class ModulesManager(object):
@ -86,7 +82,7 @@ class ModulesManager(object):
:return list(Module)
'''
with self._lock:
return dict_values(self._modules)
return list(self._modules.values())
class NetCommandFactoryJson(NetCommandFactory):

View file

@ -27,11 +27,7 @@ from pydevd_tracing import get_exception_traceback_str
from _pydev_bundle._pydev_completer import completions_to_xml
from _pydev_bundle import pydev_log
from _pydevd_bundle.pydevd_frame_utils import FramesList
try:
from StringIO import StringIO
except:
from io import StringIO
from io import StringIO
if IS_IRONPYTHON:

View file

@ -11,7 +11,7 @@ from _pydevd_bundle.pydevd_breakpoints import get_exception_class
from _pydevd_bundle.pydevd_comm import (
InternalEvaluateConsoleExpression, InternalConsoleGetCompletions, InternalRunCustomOperation,
internal_get_next_statement_targets, internal_get_smart_step_into_variants)
from _pydevd_bundle.pydevd_constants import IS_PY3K, NEXT_VALUE_SEPARATOR, IS_WINDOWS, IS_PY2, NULL
from _pydevd_bundle.pydevd_constants import NEXT_VALUE_SEPARATOR, IS_WINDOWS, NULL
from _pydevd_bundle.pydevd_comm_constants import ID_TO_MEANING, CMD_EXEC_EXPRESSION, CMD_AUTHENTICATE
from _pydevd_bundle.pydevd_api import PyDevdAPI
from _pydev_bundle.pydev_imports import StringIO
@ -118,9 +118,6 @@ class _PyDevCommandProcessor(object):
return self.api.request_suspend_thread(py_db, text.strip())
def cmd_version(self, py_db, cmd_id, seq, text):
if IS_PY2 and isinstance(text, unicode):
text = text.encode('utf-8')
# Default based on server process (although ideally the IDE should
# provide it).
if IS_WINDOWS:
@ -670,9 +667,6 @@ class _PyDevCommandProcessor(object):
def cmd_ignore_thrown_exception_at(self, py_db, cmd_id, seq, text):
if text:
replace = 'REPLACE:' # Not all 3.x versions support u'REPLACE:', so, doing workaround.
if not IS_PY3K:
replace = unicode(replace) # noqa
if text.startswith(replace):
text = text[8:]
py_db.filename_to_lines_where_exceptions_are_ignored.clear()
@ -697,9 +691,6 @@ class _PyDevCommandProcessor(object):
def cmd_enable_dont_trace(self, py_db, cmd_id, seq, text):
if text:
true_str = 'true' # Not all 3.x versions support u'str', so, doing workaround.
if not IS_PY3K:
true_str = unicode(true_str) # noqa
mode = text.strip() == true_str
pydevd_dont_trace.trace_filter(mode)

View file

@ -29,7 +29,7 @@ from _pydevd_bundle.pydevd_json_debug_options import _extract_debug_options, Deb
from _pydevd_bundle.pydevd_net_command import NetCommand
from _pydevd_bundle.pydevd_utils import convert_dap_log_message_to_expression, ScopeRequest
from _pydevd_bundle.pydevd_constants import (PY_IMPL_NAME, DebugInfoHolder, PY_VERSION_STR,
PY_IMPL_VERSION_STR, IS_64BIT_PROCESS, IS_PY2)
PY_IMPL_VERSION_STR, IS_64BIT_PROCESS)
from _pydevd_bundle.pydevd_trace_dispatch import USING_CYTHON
from _pydevd_frame_eval.pydevd_frame_eval_main import USING_FRAME_EVAL
from _pydevd_bundle.pydevd_comm import internal_get_step_in_targets_json
@ -447,9 +447,6 @@ class PyDevJsonCommandProcessor(object):
new_watch_dirs = set()
for w in watch_dirs:
try:
if IS_PY2 and isinstance(w, unicode):
w = w.encode(getfilesystemencoding())
new_watch_dirs.add(pydevd_file_utils.get_path_with_real_case(pydevd_file_utils.absolute_path(w)))
except Exception:
pydev_log.exception('Error adding watch dir: %s', w)

View file

@ -1,14 +1,11 @@
from _pydev_bundle import pydev_log
from _pydevd_bundle.pydevd_utils import hasattr_checked, DAPGrouper, Timer
try:
import StringIO
except:
import io as StringIO
from io import StringIO
import traceback
from os.path import basename
from functools import partial
from _pydevd_bundle.pydevd_constants import dict_iter_items, dict_keys, xrange, IS_PY36_OR_GREATER, \
from _pydevd_bundle.pydevd_constants import xrange, IS_PY36_OR_GREATER, \
MethodWrapperType, RETURN_VALUES_DICT, DebugInfoHolder, IS_PYPY, GENERATED_LEN_ATTR_NAME
from _pydevd_bundle.pydevd_safe_repr import SafeRepr
@ -75,7 +72,7 @@ class DefaultResolver:
else:
dct = self._get_jy_dictionary(obj)[0]
lst = sorted(dict_iter_items(dct), key=lambda tup: sorted_attributes_key(tup[0]))
lst = sorted(dct.items(), key=lambda tup: sorted_attributes_key(tup[0]))
if used___dict__:
eval_name = '.__dict__[%s]'
else:
@ -158,7 +155,7 @@ class DefaultResolver:
names = []
if not names:
if hasattr_checked(var, '__dict__'):
names = dict_keys(var.__dict__)
names = list(var.__dict__)
used___dict__ = True
return names, used___dict__
@ -202,7 +199,7 @@ class DefaultResolver:
continue
except:
# if some error occurs getting it, let's put it to the user.
strIO = StringIO.StringIO()
strIO = StringIO()
traceback.print_exc(file=strIO)
attr = strIO.getvalue()
@ -251,7 +248,7 @@ class DictResolver:
sort_keys = not IS_PY36_OR_GREATER
def resolve(self, dict, key):
def resolve(self, dct, key):
if key in (GENERATED_LEN_ATTR_NAME, TOO_LARGE_ATTR):
return None
@ -259,14 +256,14 @@ class DictResolver:
# we have to treat that because the dict resolver is also used to directly resolve the global and local
# scopes (which already have the items directly)
try:
return dict[key]
return dct[key]
except:
return getattr(dict, key)
return getattr(dct, key)
# ok, we have to iterate over the items to find the one that matches the id, because that's the only way
# to actually find the reference from the string we have before.
expected_id = int(key.split('(')[-1][:-1])
for key, val in dict_iter_items(dict):
for key, val in dct.items():
if id(key) == expected_id:
return val
@ -299,7 +296,7 @@ class DictResolver:
found_representations = set()
for key, val in dict_iter_items(dct):
for key, val in dct.items():
i += 1
key_as_str = self.key_to_str(key, fmt)
@ -334,11 +331,11 @@ class DictResolver:
ret.append((GENERATED_LEN_ATTR_NAME, len(dct), partial(_apply_evaluate_name, evaluate_name='len(%s)')))
return ret
def get_dictionary(self, dict):
def get_dictionary(self, dct):
ret = self.init_dict()
i = 0
for key, val in dict_iter_items(dict):
for key, val in dct.items():
i += 1
# we need to add the id because otherwise we cannot find the real object to get its contents later on.
key = '%s (%s)' % (self.key_to_str(key), id(key))
@ -348,9 +345,9 @@ class DictResolver:
break
# in case if the class extends built-in type and has some additional fields
additional_fields = defaultResolver.get_dictionary(dict)
additional_fields = defaultResolver.get_dictionary(dct)
ret.update(additional_fields)
ret[GENERATED_LEN_ATTR_NAME] = len(dict)
ret[GENERATED_LEN_ATTR_NAME] = len(dct)
return ret
@ -556,15 +553,15 @@ class JyArrayResolver:
#=======================================================================================================================
class MultiValueDictResolver(DictResolver):
def resolve(self, dict, key):
def resolve(self, dct, key):
if key in (GENERATED_LEN_ATTR_NAME, TOO_LARGE_ATTR):
return None
# ok, we have to iterate over the items to find the one that matches the id, because that's the only way
# to actually find the reference from the string we have before.
expected_id = int(key.split('(')[-1][:-1])
for key in dict_keys(dict):
val = dict.getlist(key)
for key in list(dct.keys()):
val = dct.getlist(key)
if id(key) == expected_id:
return val

View file

@ -4,7 +4,7 @@
# Gotten from ptvsd for supporting the format expected there.
import sys
from _pydevd_bundle.pydevd_constants import IS_PY2, IS_PY36_OR_GREATER
from _pydevd_bundle.pydevd_constants import IS_PY36_OR_GREATER
import locale
from _pydev_bundle import pydev_log
@ -93,10 +93,7 @@ class SafeRepr(object):
Returns bytes encoded as utf-8 on py2 and str on py3.
'''
try:
if IS_PY2:
return ''.join((x.encode('utf-8') if isinstance(x, unicode) else x) for x in self._repr(obj, 0))
else:
return ''.join(self._repr(obj, 0))
return ''.join(self._repr(obj, 0))
except Exception:
try:
return 'An exception was raised: %r' % sys.exc_info()[1]
@ -387,56 +384,11 @@ class SafeRepr(object):
# you are using the wrong class.
left_count, right_count = max(1, int(2 * limit / 3)), max(1, int(limit / 3)) # noqa
if IS_PY2 and isinstance(obj_repr, self.bytes):
# If we can convert to unicode before slicing, that's better (but don't do
# it if it's not possible as we may be dealing with actual binary data).
obj_repr = self._bytes_as_unicode_if_possible(obj_repr)
if isinstance(obj_repr, unicode):
# Deal with high-surrogate leftovers on Python 2.
try:
if left_count > 0 and unichr(0xD800) <= obj_repr[left_count - 1] <= unichr(0xDBFF):
left_count -= 1
except ValueError:
# On Jython unichr(0xD800) will throw an error:
# ValueError: unichr() arg is a lone surrogate in range (0xD800, 0xDFFF) (Jython UTF-16 encoding)
# Just ignore it in this case.
pass
start = obj_repr[:left_count]
# Note: yielding unicode is fine (it'll be properly converted to utf-8 if needed).
yield start
yield '...'
# Deal with high-surrogate leftovers on Python 2.
try:
if right_count > 0 and unichr(0xD800) <= obj_repr[-right_count - 1] <= unichr(0xDBFF):
right_count -= 1
except ValueError:
# On Jython unichr(0xD800) will throw an error:
# ValueError: unichr() arg is a lone surrogate in range (0xD800, 0xDFFF) (Jython UTF-16 encoding)
# Just ignore it in this case.
pass
yield obj_repr[-right_count:]
return
else:
# We can't decode it (binary string). Use repr() of bytes.
obj_repr = repr(obj_repr)
yield obj_repr[:left_count]
yield '...'
yield obj_repr[-right_count:]
def _convert_to_unicode_or_bytes_repr(self, obj_repr):
if IS_PY2 and isinstance(obj_repr, self.bytes):
obj_repr = self._bytes_as_unicode_if_possible(obj_repr)
if isinstance(obj_repr, self.bytes):
# If we haven't been able to decode it this means it's some binary data
# we can't make sense of, so, we need its repr() -- otherwise json
# encoding may break later on.
obj_repr = repr(obj_repr)
return obj_repr
def _bytes_as_unicode_if_possible(self, obj_repr):

View file

@ -10,7 +10,7 @@ else:
import os
from _pydevd_bundle.pydevd_comm import CMD_SIGNATURE_CALL_TRACE, NetCommand
from _pydevd_bundle import pydevd_xml
from _pydevd_bundle.pydevd_constants import xrange, dict_iter_items
from _pydevd_bundle.pydevd_constants import xrange
from _pydevd_bundle.pydevd_utils import get_clsname_for_code
@ -62,7 +62,7 @@ def get_type_of_value(value, ignore_module_name=('__main__', '__builtin__', 'bui
if class_name == 'dict':
class_name = 'Dict'
if len(value) > 0 and recursive:
for (k, v) in dict_iter_items(value):
for (k, v) in value.items():
class_name += '[%s, %s]' % (get_type_of_value(k, recursive=recursive),
get_type_of_value(v, recursive=recursive))
break

View file

@ -1,5 +1,5 @@
import bisect
from _pydevd_bundle.pydevd_constants import dict_items, NULL, KeyifyList
from _pydevd_bundle.pydevd_constants import NULL, KeyifyList
import pydevd_file_utils
@ -86,7 +86,7 @@ class SourceMapping(object):
try:
return self._cache[key]
except KeyError:
for _, mapping in dict_items(self._mappings_to_server):
for _, mapping in list(self._mappings_to_server.items()):
for map_entry in mapping:
if map_entry.runtime_source == runtime_source_filename: # <cell1>
if map_entry.contains_runtime_line(lineno): # matches line range
@ -107,7 +107,7 @@ class SourceMapping(object):
try:
return self._cache[key]
except KeyError:
for _absolute_normalized_filename, mapping in dict_items(self._mappings_to_server):
for _absolute_normalized_filename, mapping in list(self._mappings_to_server.items()):
for map_entry in mapping:
if map_entry.runtime_source == runtime_source_filename:
self._cache[key] = True

View file

@ -6,7 +6,6 @@ import sys
from _pydevd_bundle.pydevd_comm import get_global_debugger
from _pydevd_bundle.pydevd_constants import call_only_once
from _pydev_imps._pydev_saved_modules import threading
from _pydevd_bundle.pydevd_constants import dict_items
from _pydevd_bundle.pydevd_custom_frames import update_custom_frame, remove_custom_frame, add_custom_frame
import stackless # @UnresolvedImport
from _pydev_bundle import pydev_log
@ -201,7 +200,7 @@ def _schedule_callback(prev, next):
register_tasklet_info(prev)
try:
for tasklet_ref, tasklet_info in dict_items(_weak_tasklet_registered_to_info): # Make sure it's a copy!
for tasklet_ref, tasklet_info in list(_weak_tasklet_registered_to_info.items()): # Make sure it's a copy!
tasklet = tasklet_ref()
if tasklet is None or not tasklet.alive:
# Garbage-collected already!
@ -276,7 +275,7 @@ if not hasattr(stackless.tasklet, "trace_function"):
register_tasklet_info(prev)
try:
for tasklet_ref, tasklet_info in dict_items(_weak_tasklet_registered_to_info): # Make sure it's a copy!
for tasklet_ref, tasklet_info in list(_weak_tasklet_registered_to_info.items()): # Make sure it's a copy!
tasklet = tasklet_ref()
if tasklet is None or not tasklet.alive:
# Garbage-collected already!

View file

@ -1,8 +1,8 @@
from contextlib import contextmanager
import sys
from _pydevd_bundle.pydevd_constants import get_frame, dict_items, RETURN_VALUES_DICT, \
dict_iter_items, ForkSafeLock, GENERATED_LEN_ATTR_NAME, silence_warnings_decorator
from _pydevd_bundle.pydevd_constants import get_frame, RETURN_VALUES_DICT, \
ForkSafeLock, GENERATED_LEN_ATTR_NAME, silence_warnings_decorator
from _pydevd_bundle.pydevd_xml import get_variable_details, get_type
from _pydev_bundle.pydev_override import overrides
from _pydevd_bundle.pydevd_resolver import sorted_attributes_key, TOO_LARGE_ATTR, get_var_scope
@ -169,8 +169,7 @@ class _ObjectVariable(_AbstractVariable):
else:
# If there's no special implementation, the default is sorting the keys.
dct = resolver.get_dictionary(self.value)
lst = dict_items(dct)
lst.sort(key=lambda tup: sorted_attributes_key(tup[0]))
lst = sorted(dct.items(), key=lambda tup: sorted_attributes_key(tup[0]))
# No evaluate name in this case.
lst = [(key, value, None) for (key, value) in lst]
@ -276,7 +275,7 @@ class _FrameVariable(_AbstractVariable):
else:
raise AssertionError('Unexpected scope: %s' % (scope,))
lst, group_entries = self._group_entries([(x[0], x[1], None) for x in dict_items(dct) if x[0] != '_pydev_stop_at_break'], handle_return_values=True)
lst, group_entries = self._group_entries([(x[0], x[1], None) for x in list(dct.items()) if x[0] != '_pydev_stop_at_break'], handle_return_values=True)
group_variables = []
for key, val, _ in group_entries:
@ -288,7 +287,7 @@ class _FrameVariable(_AbstractVariable):
for key, val, _ in lst:
is_return_value = key == RETURN_VALUES_DICT
if is_return_value:
for return_key, return_value in dict_iter_items(val):
for return_key, return_value in val.items():
variable = _ObjectVariable(
self.py_db, return_key, return_value, self._register_variable, is_return_value, '%s[%r]' % (key, return_key), frame=self.frame)
children_variables.append(variable)
@ -452,7 +451,7 @@ class SuspendedFramesManager(object):
if tracker is not None:
return tracker
for _thread_id, tracker in dict_iter_items(self._thread_id_to_tracker):
for _thread_id, tracker in self._thread_id_to_tracker.items():
try:
tracker.get_variable(variable_reference)
except KeyError:

View file

@ -1,7 +1,6 @@
'''For debug purpose we are replacing actual builtin property by the debug property
'''
from _pydevd_bundle.pydevd_comm import get_global_debugger
from _pydevd_bundle.pydevd_constants import DebugInfoHolder, IS_PY2
from _pydev_bundle import pydev_log
@ -12,18 +11,11 @@ def replace_builtin_property(new_property=None):
if new_property is None:
new_property = DebugProperty
original = property
if IS_PY2:
try:
import __builtin__
__builtin__.__dict__['property'] = new_property
except:
pydev_log.exception() # @Reimport
else:
try:
import builtins # Python 3.0 does not have the __builtin__ module @UnresolvedImport
builtins.__dict__['property'] = new_property
except:
pydev_log.exception() # @Reimport
try:
import builtins
builtins.__dict__['property'] = new_property
except:
pydev_log.exception() # @Reimport
return original

View file

@ -9,15 +9,12 @@ import os
import ctypes
from importlib import import_module
try:
from urllib import quote
except:
from urllib.parse import quote # @UnresolvedImport
from urllib.parse import quote # @UnresolvedImport
import time
import inspect
import sys
from _pydevd_bundle.pydevd_constants import IS_PY3K, USE_CUSTOM_SYS_CURRENT_FRAMES, IS_PYPY, SUPPORT_GEVENT, \
from _pydevd_bundle.pydevd_constants import USE_CUSTOM_SYS_CURRENT_FRAMES, IS_PYPY, SUPPORT_GEVENT, \
GEVENT_SUPPORT_NOT_SET_MSG, GENERATED_LEN_ATTR_NAME, PYDEVD_WARN_SLOW_RESOLVE_TIMEOUT, \
get_global_debugger
from _pydev_imps._pydev_saved_modules import threading
@ -94,19 +91,12 @@ def compare_object_attrs_key(x):
return (-1, to_string(x))
if IS_PY3K:
def is_string(x):
return isinstance(x, str)
else:
def is_string(x):
return isinstance(x, basestring)
def is_string(x):
return isinstance(x, str)
def to_string(x):
if is_string(x):
if isinstance(x, str):
return x
else:
return str(x)
@ -117,18 +107,8 @@ def print_exc():
traceback.print_exc()
if IS_PY3K:
def quote_smart(s, safe='/'):
return quote(s, safe)
else:
def quote_smart(s, safe='/'):
if isinstance(s, unicode):
s = s.encode('utf-8')
return quote(s, safe)
def quote_smart(s, safe='/'):
return quote(s, safe)
def get_clsname_for_code(code, frame):

View file

@ -2,26 +2,19 @@
resolution/conversion to XML.
"""
import pickle
from _pydevd_bundle.pydevd_constants import get_frame, get_current_thread_id, xrange, IS_PY2, \
iter_chars, silence_warnings_decorator, dict_iter_items
from _pydevd_bundle.pydevd_constants import get_frame, get_current_thread_id, xrange, \
iter_chars, silence_warnings_decorator
from _pydevd_bundle.pydevd_xml import ExceptionOnEvaluate, get_type, var_to_xml
from _pydev_bundle import pydev_log
import codecs
import os
import functools
from _pydevd_bundle.pydevd_thread_lifecycle import resume_threads, mark_thread_suspended, suspend_all_threads
from _pydevd_bundle.pydevd_comm_constants import CMD_SET_BREAK
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import sys # @Reimport
from _pydev_imps._pydev_saved_modules import threading
import traceback
from _pydevd_bundle import pydevd_save_locals, pydevd_timeout, pydevd_constants, pydevd_utils
from _pydevd_bundle import pydevd_save_locals, pydevd_timeout, pydevd_constants
from _pydev_bundle.pydev_imports import Exec, execfile
from _pydevd_bundle.pydevd_utils import to_string
@ -244,13 +237,6 @@ def _expression_to_evaluate(expression):
else:
expression = u''.join(new_lines)
if IS_PY2 and isinstance(expression, unicode):
# In Python 2 we need to compile with bytes and not unicode (otherwise it'd use
# the default encoding which could be ascii).
# See https://github.com/microsoft/ptvsd/issues/1864 and https://bugs.python.org/issue18870
# for why we're using the utf-8 bom.
# i.e.: ... if an utf-8 bom is present, it is considered utf-8 in eval/exec.
expression = codecs.BOM_UTF8 + expression.encode('utf-8')
return expression
@ -267,9 +253,6 @@ def eval_in_context(expression, globals, locals=None):
# Ok, we have the initial error message, but let's see if we're dealing with a name mangling error...
try:
if IS_PY2 and isinstance(expression, unicode):
expression = expression.encode('utf-8')
if '.__' in expression:
# Try to handle '__' name mangling (for simple cases such as self.__variable.__another_var).
split = expression.split('.')
@ -395,7 +378,7 @@ def _update_globals_and_locals(updated_globals, initial_globals, frame):
# one that enabled creating and using variables during the same evaluation.
assert updated_globals is not None
changed = False
for key, val in dict_iter_items(updated_globals):
for key, val in updated_globals.items():
if initial_globals.get(key) is not val:
changed = True
frame.f_locals[key] = val
@ -466,10 +449,7 @@ def evaluate_expression(py_db, frame, expression, is_exec):
updated_locals = None
try:
if IS_PY2 and isinstance(expression, unicode):
expression = expression.replace(u'@LINE@', u'\n')
else:
expression = expression.replace('@LINE@', '\n')
expression = expression.replace('@LINE@', '\n')
if is_exec:
try:
@ -488,11 +468,6 @@ def evaluate_expression(py_db, frame, expression, is_exec):
else:
result = eval(compiled, updated_globals, updated_locals)
if result is not None: # Only print if it's not None (as python does)
if IS_PY2 and isinstance(result, unicode):
encoding = sys.stdout.encoding
if not encoding:
encoding = os.environ.get('PYTHONIOENCODING', 'utf-8')
result = result.encode(encoding, 'replace')
sys.stdout.write('%s\n' % (result,))
return

View file

@ -2,12 +2,11 @@ from _pydev_bundle import pydev_log
from _pydevd_bundle import pydevd_extension_utils
from _pydevd_bundle import pydevd_resolver
import sys
from _pydevd_bundle.pydevd_constants import dict_iter_items, dict_keys, IS_PY3K, \
BUILTINS_MODULE_NAME, MAXIMUM_VARIABLE_REPRESENTATION_SIZE, RETURN_VALUES_DICT, LOAD_VALUES_ASYNC, \
DEFAULT_VALUE
from _pydevd_bundle.pydevd_constants import BUILTINS_MODULE_NAME, MAXIMUM_VARIABLE_REPRESENTATION_SIZE, \
RETURN_VALUES_DICT, LOAD_VALUES_ASYNC, DEFAULT_VALUE
from _pydev_bundle.pydev_imports import quote
from _pydevd_bundle.pydevd_extension_api import TypeResolveProvider, StrPresentationProvider
from _pydevd_bundle.pydevd_utils import isinstance_checked, hasattr_checked, DAPGrouper, Timer
from _pydevd_bundle.pydevd_utils import isinstance_checked, hasattr_checked, DAPGrouper
from _pydevd_bundle.pydevd_resolver import get_var_scope
try:
@ -253,25 +252,21 @@ def should_evaluate_full_value(val):
def return_values_from_dict_to_xml(return_dict):
res = ""
for name, val in dict_iter_items(return_dict):
res += var_to_xml(val, name, additional_in_xml=' isRetVal="True"')
return res
res = []
for name, val in return_dict.items():
res.append(var_to_xml(val, name, additional_in_xml=' isRetVal="True"'))
return ''.join(res)
def frame_vars_to_xml(frame_f_locals, hidden_ns=None):
""" dumps frame variables to XML
<var name="var_name" scope="local" type="type" value="value"/>
"""
xml = ""
xml = []
keys = dict_keys(frame_f_locals)
if hasattr(keys, 'sort'):
keys.sort() # Python 3.0 does not have it
else:
keys = sorted(keys) # Jython 2.1 does not have it
keys = sorted(frame_f_locals)
return_values_xml = ''
return_values_xml = []
for k in keys:
try:
@ -282,20 +277,21 @@ def frame_vars_to_xml(frame_f_locals, hidden_ns=None):
continue
if k == RETURN_VALUES_DICT:
for name, val in dict_iter_items(v):
return_values_xml += var_to_xml(val, name, additional_in_xml=' isRetVal="True"')
for name, val in v.items():
return_values_xml.append(var_to_xml(val, name, additional_in_xml=' isRetVal="True"'))
else:
if hidden_ns is not None and k in hidden_ns:
xml += var_to_xml(v, str(k), additional_in_xml=' isIPythonHidden="True"',
evaluate_full_value=eval_full_val)
xml.append(var_to_xml(v, str(k), additional_in_xml=' isIPythonHidden="True"',
evaluate_full_value=eval_full_val))
else:
xml += var_to_xml(v, str(k), evaluate_full_value=eval_full_val)
xml.append(var_to_xml(v, str(k), evaluate_full_value=eval_full_val))
except Exception:
pydev_log.exception("Unexpected error, recovered safely.")
# Show return values as the first entry.
return return_values_xml + xml
return_values_xml.extend(xml)
return ''.join(return_values_xml)
def get_variable_details(val, evaluate_full_value=True, to_string=None):
@ -357,12 +353,8 @@ def get_variable_details(val, evaluate_full_value=True, to_string=None):
# fix to work with unicode values
try:
if not IS_PY3K:
if value.__class__ == unicode: # @UndefinedVariable
value = value.encode('utf-8', 'replace')
else:
if value.__class__ == bytes:
value = value.decode('utf-8', 'replace')
if value.__class__ == bytes:
value = value.decode('utf-8', 'replace')
except TypeError:
pass

View file

@ -7,8 +7,6 @@ import sys
miniconda64_envs = os.getenv('MINICONDA64_ENVS')
python_installations = [
r'%s/py26_64/bin/python' % miniconda64_envs,
r'%s/py27_64/bin/python' % miniconda64_envs,
r'%s/py34_64/bin/python' % miniconda64_envs,
r'%s/py35_64/bin/python' % miniconda64_envs,
r'%s/py36_64/bin/python' % miniconda64_envs,

View file

@ -16,17 +16,18 @@ import sys
miniconda_envs = os.getenv('MINICONDA_ENVS', r'C:\bin\Miniconda3\envs')
python_installations = [
r'%s\py27_32\python.exe' % miniconda_envs,
r'%s\py36_32\python.exe' % miniconda_envs,
r'%s\py37_32\python.exe' % miniconda_envs,
r'%s\py38_32\python.exe' % miniconda_envs,
r'%s\py39_32\python.exe' % miniconda_envs,
r'%s\py310_32\python.exe' % miniconda_envs,
r'%s\py27_64\python.exe' % miniconda_envs,
r'%s\py36_64\python.exe' % miniconda_envs,
r'%s\py37_64\python.exe' % miniconda_envs,
r'%s\py38_64\python.exe' % miniconda_envs,
r'%s\py39_64\python.exe' % miniconda_envs,
r'%s\py310_64\python.exe' % miniconda_envs,
]
root_dir = os.path.dirname(os.path.dirname(__file__))
@ -69,12 +70,14 @@ def main():
python_install, os.path.join(root_dir, 'build_tools', 'build.py'), '--no-remove-binaries', '--target-pyd-name=%s' % new_name, '--force-cython']
if i != 0:
args.append('--no-regenerate-files')
version_number = extract_version(python_install)
if version_number.startswith('36') or version_number.startswith('37'):
name_frame_eval = 'pydevd_frame_evaluator_%s_%s' % (sys.platform, extract_version(python_install))
args.append('--target-pyd-frame-eval=%s' % name_frame_eval)
name_frame_eval = 'pydevd_frame_evaluator_%s_%s' % (sys.platform, extract_version(python_install))
args.append('--target-pyd-frame-eval=%s' % name_frame_eval)
print('Calling: %s' % (' '.join(args)))
subprocess.check_call(args)
env = os.environ.copy()
python_exe_dir = os.path.dirname(python_install)
env['PATH'] = env['PATH'] + ';' + os.path.join(python_exe_dir, 'DLLs') + ';' + os.path.join(python_exe_dir, 'Library', 'bin')
subprocess.check_call(args, env=env)
if __name__ == '__main__':

View file

@ -103,8 +103,6 @@ def generate_dont_trace_files():
# DO NOT edit manually!
# DO NOT edit manually!
from _pydevd_bundle.pydevd_constants import IS_PY3K
LIB_FILE = 1
PYDEV_FILE = 2
@ -128,14 +126,13 @@ DONT_TRACE = {
%(pydev_files)s
}
if IS_PY3K:
# if we try to trace io.py it seems it can get halted (see http://bugs.python.org/issue4716)
DONT_TRACE['io.py'] = LIB_FILE
# if we try to trace io.py it seems it can get halted (see http://bugs.python.org/issue4716)
DONT_TRACE['io.py'] = LIB_FILE
# Don't trace common encodings too
DONT_TRACE['cp1252.py'] = LIB_FILE
DONT_TRACE['utf_8.py'] = LIB_FILE
DONT_TRACE['codecs.py'] = LIB_FILE
# Don't trace common encodings too
DONT_TRACE['cp1252.py'] = LIB_FILE
DONT_TRACE['utf_8.py'] = LIB_FILE
DONT_TRACE['codecs.py'] = LIB_FILE
'''
pydev_files = []

View file

@ -9,13 +9,6 @@ __version_info__ in pydevd.py
set CONDA_FORCE_32BIT=1
conda create -y -f -n py27_32 python=2.7 cython numpy nose ipython pip
activate py27_32
pip install "django>=1.7,<1.8"
pip install -U "setuptools>=0.9"
pip install -U "pip>=1.4" "wheel>=0.21" twine
conda deactivate
conda create -y -f -n py36_32 python=3.6 cython numpy nose ipython pip
activate py36_32
pip install "django>=1.9"
@ -44,17 +37,16 @@ pip install -U "setuptools>=0.9"
pip install -U "pip>=1.4" "wheel>=0.21" twine
conda deactivate
set CONDA_FORCE_32BIT=
conda create -y -f -n py27_64 python=2.7 cython numpy nose ipython pip
activate py27_64
pip install "django>=1.7,<1.8"
conda create -y -f -n py310_32 python=3.10 cython pip
activate py310_32
pip install "django>=1.9"
pip install -U "setuptools>=0.9"
pip install -U "pip>=1.4" "wheel>=0.21" twine
conda deactivate
set CONDA_FORCE_32BIT=
conda create -y -f -n py36_64 python=3.6 cython numpy nose ipython pip
activate py36_64
pip install "django>=1.9"
@ -83,50 +75,57 @@ pip install -U "setuptools>=0.9"
pip install -U "pip>=1.4" "wheel>=0.21" twine
conda deactivate
conda create -y -f -n py310_64 python=3.10 cython pip
activate py310_64
pip install "django>=1.9"
pip install -U "setuptools>=0.9"
pip install -U "pip>=1.4" "wheel>=0.21" twine
conda deactivate
### UPDATE CYTHON
set CONDA_FORCE_32BIT=1
activate py27_32
conda update -y cython
conda deactivate
activate py36_32
conda update -y cython
pip install cython==0.29.28
conda deactivate
activate py37_32
conda update -y cython
pip install cython==0.29.28
conda deactivate
activate py38_32
conda update -y cython
pip install cython==0.29.28
conda deactivate
activate py39_32
conda update -y cython
pip install cython==0.29.28
conda deactivate
activate py310_32
pip install cython==0.29.28
conda deactivate
set CONDA_FORCE_32BIT=
activate py27_64
conda update -y cython
conda deactivate
activate py36_64
conda update -y cython
pip install cython==0.29.28
conda deactivate
activate py37_64
conda update -y cython
pip install cython==0.29.28
conda deactivate
activate py38_64
conda update -y cython
pip install cython==0.29.28
conda deactivate
activate py39_64
conda update -y cython
pip install cython==0.29.28
conda deactivate
activate py310_64
pip install cython==0.29.28
conda deactivate
4. Regenerate the .pyx and .c
@ -159,11 +158,6 @@ C:\bin\Python38-32\python build_tools\build_binaries_windows.py
rm dist/pydevd*
activate py27_32
python setup.py sdist bdist_wheel
conda deactivate
dir dist
activate py36_32
python setup.py sdist bdist_wheel
conda deactivate
@ -184,12 +178,7 @@ python setup.py sdist bdist_wheel
conda deactivate
dir dist
activate py27_64
python setup.py sdist bdist_wheel
conda deactivate
dir dist
activate py35_64
activate py310_32
python setup.py sdist bdist_wheel
conda deactivate
dir dist
@ -214,9 +203,14 @@ python setup.py sdist bdist_wheel
conda deactivate
dir dist
activate py310_64
python setup.py sdist bdist_wheel
conda deactivate
dir dist
# Note: uploading with twine gives an error in the end, but apparently it works (check final result in pypi).
twine upload dist/pydevd*
git tag pydev_debugger_2_3_0 -a -m "PyDev.Debugger 2.3.0"
git tag pydev_debugger_2_8_0 -a -m "PyDev.Debugger 2.8.0"
git push --tags

View file

@ -8,12 +8,15 @@ import names_to_rename
_CAMEL_RE = re.compile(r'(?<=[a-z])([A-Z])')
_CAMEL_DEF_RE = re.compile(r'(def )((([A-Z0-9]+|[a-z0-9])[a-z][a-z0-9]*[A-Z]|[a-z0-9]*[A-Z][A-Z0-9]*[a-z])[A-Za-z0-9]*)')
def _normalize(name):
return _CAMEL_RE.sub(lambda x: '_' + x.group(1).lower(), name).lower()
def find_matches_in_contents(contents):
return [x[1] for x in re.findall(_CAMEL_DEF_RE, contents)]
def iter_files_in_dir(dirname):
for root, dirs, files in os.walk(dirname):
for name in ('pydevd_attach_to_process', '.git', 'stubs', 'pydev_ipython', 'third_party', 'pydev_ipython'):
@ -29,12 +32,14 @@ def iter_files_in_dir(dirname):
yield path, initial_contents
def find_matches():
found = set()
for path, initial_contents in iter_files_in_dir(os.path.dirname(os.path.dirname(__file__))):
found.update(find_matches_in_contents(initial_contents))
print '\n'.join(sorted(found))
print 'Total', len(found)
print('\n'.join(sorted(found)))
print('Total', len(found))
def substitute_contents(re_name_to_new_val, initial_contents):
contents = initial_contents
@ -42,13 +47,14 @@ def substitute_contents(re_name_to_new_val, initial_contents):
contents = re.sub(key, val, contents)
return contents
def make_replace():
re_name_to_new_val = load_re_to_new_val(names_to_rename.NAMES)
# traverse root directory, and list directories as dirs and files as files
for path, initial_contents in iter_files_in_dir(os.path.dirname(os.path.dirname(__file__))):
contents = substitute_contents(re_name_to_new_val, initial_contents)
if contents != initial_contents:
print 'Changed something at: %s' % (path,)
print('Changed something at: %s' % (path,))
for val in re_name_to_new_val.itervalues():
# Check in initial contents to see if it already existed!
@ -64,9 +70,10 @@ def load_re_to_new_val(names):
for n in names.splitlines():
n = n.strip()
if not n.startswith('#') and n:
name_to_new_val[r'\b'+n+r'\b'] = _normalize(n)
name_to_new_val[r'\b' + n + r'\b'] = _normalize(n)
return name_to_new_val
def test():
assert _normalize('RestoreSysSetTraceFunc') == 'restore_sys_set_trace_func'
assert _normalize('restoreSysSetTraceFunc') == 'restore_sys_set_trace_func'
@ -116,6 +123,7 @@ dict_pop
dict_values
'''
if __name__ == '__main__':
# find_matches()
make_replace()

View file

@ -15,12 +15,15 @@ import sys
try:
import os.path
def fully_normalize_path(path):
'''fixes the path so that the format of the path really reflects the directories in the system
'''
return os.path.normpath(path)
join = os.path.join
except: # ImportError or AttributeError.
# See: http://stackoverflow.com/questions/10254353/error-while-installing-jython-for-pydev
def fully_normalize_path(path):
'''fixes the path so that the format of the path really reflects the directories in the system
@ -32,7 +35,6 @@ except: # ImportError or AttributeError.
return a + b
return a + '/' + b
IS_PYTHON_3_ONWARDS = 0
try:
@ -50,11 +52,7 @@ except:
if sys.platform == "cygwin":
try:
import ctypes # use from the system if available
except ImportError:
sys.path.append(join(sys.path[0], 'third_party/wrapped_for_pydev'))
import ctypes
import ctypes
def native_path(path):
MAX_PATH = 512 # On cygwin NT, its 260 lately, but just need BIG ENOUGH buffer
@ -64,17 +62,17 @@ if sys.platform == "cygwin":
path = fully_normalize_path(path)
path = tobytes(path)
CCP_POSIX_TO_WIN_A = 0
cygwin1dll = ctypes.cdll.LoadLibrary( 'cygwin1.dll' )
cygwin1dll = ctypes.cdll.LoadLibrary('cygwin1.dll')
cygwin1dll.cygwin_conv_path(CCP_POSIX_TO_WIN_A, path, retval, MAX_PATH)
return retval.value
else:
def native_path(path):
return fully_normalize_path(path)
def __getfilesystemencoding():
'''
Note: there's a copy of this method in _pydev_filesystem_encoding.py
@ -100,11 +98,12 @@ def __getfilesystemencoding():
return 'mbcs'
return 'utf-8'
def getfilesystemencoding():
try:
ret = __getfilesystemencoding()
#Check if the encoding is actually there to be used!
# Check if the encoding is actually there to be used!
if hasattr('', 'encode'):
''.encode(ret)
if hasattr('', 'decode'):
@ -114,6 +113,7 @@ def getfilesystemencoding():
except:
return 'utf-8'
file_system_encoding = getfilesystemencoding()
if IS_PYTHON_3_ONWARDS:
@ -132,12 +132,14 @@ def tounicode(s):
return s.decode(file_system_encoding)
return s
def tobytes(s):
if hasattr(s, 'encode'):
if not isinstance(s, bytes_type):
return s.encode(file_system_encoding)
return s
def toasciimxl(s):
# output for xml without a declared encoding
@ -179,7 +181,6 @@ if __name__ == '__main__':
if sys.platform == "cygwin" and not executable.endswith(tounicode('.exe')):
executable += tounicode('.exe')
try:
major = str(sys.version_info[0])
minor = str(sys.version_info[1])
@ -204,7 +205,6 @@ if __name__ == '__main__':
prefix = tounicode(native_path(sys.prefix))
# print_ 'prefix is', prefix
result = []
path_used = sys.path
@ -242,7 +242,6 @@ if __name__ == '__main__':
for builtinMod in sys.builtin_module_names:
contents.append(tounicode('<forced_lib>%s</forced_lib>') % tounicode(builtinMod))
contents.append(tounicode('</xml>'))
unic = tounicode('\n').join(contents)
inasciixml = toasciimxl(unic)

View file

@ -3,14 +3,6 @@ Entry-point module to start the code-completion server for PyDev.
@author Fabio Zadrozny
'''
import sys
IS_PYTHON_3_ONWARDS = sys.version_info[0] >= 3
if not IS_PYTHON_3_ONWARDS:
import __builtin__
else:
import builtins as __builtin__ # Python 3.0
from _pydevd_bundle.pydevd_constants import IS_JYTHON
if IS_JYTHON:
@ -24,7 +16,6 @@ else:
SERVER_NAME = 'pycompletionserver'
from _pydev_bundle import _pydev_imports_tipper
from _pydev_imps._pydev_saved_modules import socket
import sys
@ -35,7 +26,6 @@ if sys.platform == "darwin":
except:
pass
# initial sys.path
_sys_path = []
for p in sys.path:
@ -47,20 +37,11 @@ _sys_modules = {}
for name, mod in sys.modules.items():
_sys_modules[name] = mod
import traceback
from _pydev_imps._pydev_saved_modules import time
from io import StringIO
try:
import StringIO
except:
import io as StringIO #Python 3.0
try:
from urllib import quote_plus, unquote_plus
except ImportError:
from urllib.parse import quote_plus, unquote_plus #Python 3.0
from urllib.parse import quote_plus, unquote_plus
INFO1 = 1
INFO2 = 2
@ -69,6 +50,7 @@ ERROR = 8
DEBUG = INFO1 | ERROR
def dbg(s, prior):
if prior & DEBUG != 0:
sys.stdout.write('%s\n' % (s,))
@ -76,8 +58,9 @@ def dbg(s, prior):
# print_ >> f, s
# f.close()
from _pydev_bundle import pydev_localhost
HOST = pydev_localhost.get_localhost() # Symbolic name meaning the local host
HOST = pydev_localhost.get_localhost() # Symbolic name meaning the local host
MSG_KILL_SERVER = '@@KILL_SERVER_END@@'
MSG_COMPLETIONS = '@@COMPLETIONS'
@ -94,10 +77,9 @@ MSG_SEARCH = '@@SEARCH'
BUFFER_SIZE = 1024
currDirModule = None
def complete_from_dir(directory):
'''
This is necessary so that we get the imports from the same directory where the file
@ -173,9 +155,11 @@ class Processor:
return '%s(%s)%s' % (MSG_COMPLETIONS, ''.join(compMsg), MSG_END)
class Exit(Exception):
    """Raised to unwind the completion server's receive loop.

    Triggered when the client connection ends (recv returns no data) or a
    kill message is processed; the handler then stops serving and may call
    sys.exit(0).
    """
    pass
class CompletionServer:
def __init__(self, port):
@ -185,7 +169,6 @@ class CompletionServer:
self.exit_process_on_kill = True
self.processor = Processor()
def connect_to_server(self):
from _pydev_imps._pydev_saved_modules import socket
@ -225,17 +208,8 @@ class CompletionServer:
return
totalsent = totalsent + sent
def send(self, msg):
if not hasattr(self.socket, 'sendall'):
#Older versions (jython 2.1)
self.emulated_sendall(msg)
else:
if IS_PYTHON_3_ONWARDS:
self.socket.sendall(bytearray(msg, 'utf-8'))
else:
self.socket.sendall(msg)
self.socket.sendall(bytearray(msg, 'utf-8'))
def run(self):
# Echo server program
@ -249,7 +223,6 @@ class CompletionServer:
dbg(SERVER_NAME + ' Connected to java server', INFO1)
while not self.ended:
data = ''
@ -257,10 +230,7 @@ class CompletionServer:
received = self.socket.recv(BUFFER_SIZE)
if len(received) == 0:
raise Exit() # ok, connection ended
if IS_PYTHON_3_ONWARDS:
data = data + received.decode('utf-8')
else:
data = data + received
data = data + received.decode('utf-8')
try:
try:
@ -355,13 +325,13 @@ class CompletionServer:
try:
self.send(msg)
except socket.error:
pass # Ok, may be closed already
pass # Ok, may be closed already
raise e # raise original error.
raise e # raise original error.
except:
dbg(SERVER_NAME + ' exception occurred', ERROR)
s = StringIO.StringIO()
s = StringIO()
traceback.print_exc(file=s)
err = s.getvalue()
@ -370,8 +340,7 @@ class CompletionServer:
try:
self.send(msg)
except socket.error:
pass # Ok, may be closed already
pass # Ok, may be closed already
finally:
log.clear_log()
@ -380,13 +349,12 @@ class CompletionServer:
self.ended = True
raise Exit() # connection broken
except Exit:
if self.exit_process_on_kill:
sys.exit(0)
# No need to log SystemExit error
except:
s = StringIO.StringIO()
s = StringIO()
exc_info = sys.exc_info()
traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], limit=None, file=s)
@ -395,7 +363,6 @@ class CompletionServer:
raise
if __name__ == '__main__':
port = int(sys.argv[1]) # this is from where we want to receive messages.

View file

@ -17,7 +17,7 @@ import os
import sys
from _pydev_imps._pydev_saved_modules import threading
from _pydevd_bundle.pydevd_constants import INTERACTIVE_MODE_AVAILABLE, dict_keys
from _pydevd_bundle.pydevd_constants import INTERACTIVE_MODE_AVAILABLE
import traceback
from _pydev_bundle import pydev_log
@ -26,16 +26,12 @@ from _pydevd_bundle import pydevd_save_locals
from _pydev_bundle.pydev_imports import Exec, _queue
if sys.version_info[0] >= 3:
import builtins as __builtin__
else:
import __builtin__
import builtins as __builtin__
from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface, BaseStdIn # @UnusedImport
from _pydev_bundle.pydev_console_utils import CodeFragment
IS_PYTHON_3_ONWARDS = sys.version_info[0] >= 3
IS_PY24 = sys.version_info[0] == 2 and sys.version_info[1] == 4
class Command:
@ -138,7 +134,7 @@ def set_debug_hook(debug_hook):
def activate_mpl_if_already_imported(interpreter):
if interpreter.mpl_modules_for_patching:
for module in dict_keys(interpreter.mpl_modules_for_patching):
for module in list(interpreter.mpl_modules_for_patching):
if module in sys.modules:
activate_function = interpreter.mpl_modules_for_patching.pop(module)
activate_function()
@ -176,7 +172,7 @@ def init_mpl_in_console(interpreter):
activate_mpl_if_already_imported(interpreter)
from _pydev_bundle.pydev_import_hook import import_hook_manager
for mod in dict_keys(interpreter.mpl_modules_for_patching):
for mod in list(interpreter.mpl_modules_for_patching):
import_hook_manager.add_module_name(mod, interpreter.mpl_modules_for_patching.pop(mod))
@ -371,10 +367,7 @@ def start_console_server(host, port, interpreter):
from _pydev_bundle.pydev_imports import SimpleXMLRPCServer as XMLRPCServer # @Reimport
try:
if IS_PY24:
server = XMLRPCServer((host, port), logRequests=False)
else:
server = XMLRPCServer((host, port), logRequests=False, allow_none=True)
server = XMLRPCServer((host, port), logRequests=False, allow_none=True)
except:
sys.stderr.write('Error starting server with host: "%s", port: "%s", client_port: "%s"\n' % (host, port, interpreter.client_port))

View file

@ -51,9 +51,9 @@ from _pydevd_bundle.pydevd_comm_constants import (CMD_THREAD_SUSPEND, CMD_STEP_I
CMD_STEP_INTO_MY_CODE, CMD_STEP_OVER, CMD_SMART_STEP_INTO, CMD_RUN_TO_LINE,
CMD_SET_NEXT_STATEMENT, CMD_STEP_RETURN, CMD_ADD_EXCEPTION_BREAK, CMD_STEP_RETURN_MY_CODE,
CMD_STEP_OVER_MY_CODE, constant_to_str, CMD_STEP_INTO_COROUTINE)
from _pydevd_bundle.pydevd_constants import (IS_JYTH_LESS25, get_thread_id, get_current_thread_id,
dict_keys, dict_iter_items, DebugInfoHolder, PYTHON_SUSPEND, STATE_SUSPEND, STATE_RUN, get_frame,
clear_cached_thread_id, INTERACTIVE_MODE_AVAILABLE, SHOW_DEBUG_INFO_ENV, IS_PY34_OR_GREATER, IS_PY2, NULL,
from _pydevd_bundle.pydevd_constants import (get_thread_id, get_current_thread_id,
DebugInfoHolder, PYTHON_SUSPEND, STATE_SUSPEND, STATE_RUN, get_frame,
clear_cached_thread_id, INTERACTIVE_MODE_AVAILABLE, SHOW_DEBUG_INFO_ENV, NULL,
NO_FTRACE, IS_IRONPYTHON, JSON_PROTOCOL, IS_CPYTHON, HTTP_JSON_PROTOCOL, USE_CUSTOM_SYS_CURRENT_FRAMES_MAP, call_only_once,
ForkSafeLock, IGNORE_BASENAMES_STARTING_WITH, EXCEPTION_TYPE_UNHANDLED, SUPPORT_GEVENT)
from _pydevd_bundle.pydevd_defaults import PydevdCustomization # Note: import alias used on pydev_monkey.
@ -111,7 +111,7 @@ if SUPPORT_GEVENT:
if USE_CUSTOM_SYS_CURRENT_FRAMES_MAP:
from _pydevd_bundle.pydevd_constants import constructed_tid_to_last_frame
__version_info__ = (2, 7, 0)
__version_info__ = (2, 8, 0)
__version_info_str__ = []
for v in __version_info__:
__version_info_str__.append(str(v))
@ -161,10 +161,7 @@ def install_breakpointhook(pydevd_breakpointhook=None):
# Install the breakpoint hook at import time.
install_breakpointhook()
SUPPORT_PLUGINS = not IS_JYTH_LESS25
PluginManager = None
if SUPPORT_PLUGINS:
from _pydevd_bundle.pydevd_plugin_utils import PluginManager
from _pydevd_bundle.pydevd_plugin_utils import PluginManager
threadingEnumerate = threading.enumerate
threadingCurrentThread = threading.current_thread
@ -885,11 +882,6 @@ class PyDB(object):
return eval(condition, new_frame.f_globals, new_frame.f_locals)
except Exception as e:
if IS_PY2:
# Must be bytes on py2.
if isinstance(condition, unicode): # noqa
condition = condition.encode('utf-8')
if not isinstance(e, self.skip_print_breakpoint_exception):
sys.stderr.write('Error while evaluating expression: %s\n' % (condition,))
@ -1165,7 +1157,7 @@ class PyDB(object):
return self._threads_suspended_single_notification
def get_plugin_lazy_init(self):
if self.plugin is None and SUPPORT_PLUGINS:
if self.plugin is None:
self.plugin = PluginManager(self)
return self.plugin
@ -1533,7 +1525,7 @@ class PyDB(object):
# import hook and patches for matplotlib support in debug console
from _pydev_bundle.pydev_import_hook import import_hook_manager
if is_current_thread_main_thread():
for module in dict_keys(self.mpl_modules_for_patching):
for module in list(self.mpl_modules_for_patching):
import_hook_manager.add_module_name(module, self.mpl_modules_for_patching.pop(module))
def init_gui_support(self):
@ -1574,7 +1566,7 @@ class PyDB(object):
if len(self.mpl_modules_for_patching) > 0:
if is_current_thread_main_thread(): # Note that we call only in the main thread.
for module in dict_keys(self.mpl_modules_for_patching):
for module in list(self.mpl_modules_for_patching):
if module in sys.modules:
activate_function = self.mpl_modules_for_patching.pop(module, None)
if activate_function is not None:
@ -1775,7 +1767,7 @@ class PyDB(object):
def consolidate_breakpoints(self, canonical_normalized_filename, id_to_breakpoint, file_to_line_to_breakpoints):
break_dict = {}
for _breakpoint_id, pybreakpoint in dict_iter_items(id_to_breakpoint):
for _breakpoint_id, pybreakpoint in id_to_breakpoint.items():
break_dict[pybreakpoint.line] = pybreakpoint
file_to_line_to_breakpoints[canonical_normalized_filename] = break_dict
@ -2016,7 +2008,7 @@ class PyDB(object):
with CustomFramesContainer.custom_frames_lock: # @UndefinedVariable
from_this_thread = []
for frame_custom_thread_id, custom_frame in dict_iter_items(CustomFramesContainer.custom_frames):
for frame_custom_thread_id, custom_frame in CustomFramesContainer.custom_frames.items():
if custom_frame.thread_id == thread.ident:
frames_tracker.track(thread_id, pydevd_frame_utils.create_frames_list_from_frame(custom_frame.frame), frame_custom_thread_id=frame_custom_thread_id)
# print('Frame created as thread: %s' % (frame_custom_thread_id,))
@ -2230,7 +2222,7 @@ class PyDB(object):
try:
def get_pydb_daemon_threads_to_wait():
pydb_daemon_threads = set(dict_keys(self.created_pydb_daemon_threads))
pydb_daemon_threads = set(self.created_pydb_daemon_threads)
pydb_daemon_threads.discard(self.check_alive_thread)
pydb_daemon_threads.discard(threading.current_thread())
return pydb_daemon_threads
@ -2298,7 +2290,7 @@ class PyDB(object):
else:
pydev_log.debug("PyDB.dispose_and_kill_all_pydevd_threads timed out waiting for writer to be empty.")
pydb_daemon_threads = set(dict_keys(self.created_pydb_daemon_threads))
pydb_daemon_threads = set(self.created_pydb_daemon_threads)
for t in pydb_daemon_threads:
if hasattr(t, 'do_kill_pydev_thread'):
pydev_log.debug("PyDB.dispose_and_kill_all_pydevd_threads killing thread: %s", t)
@ -2909,7 +2901,7 @@ def _locked_settrace(
py_db.set_trace_for_frame_and_parents(get_frame().f_back)
with CustomFramesContainer.custom_frames_lock: # @UndefinedVariable
for _frameId, custom_frame in dict_iter_items(CustomFramesContainer.custom_frames):
for _frameId, custom_frame in CustomFramesContainer.custom_frames.items():
py_db.set_trace_for_frame_and_parents(custom_frame.frame)
else:
@ -3374,8 +3366,7 @@ def main():
if setup['save-threading']:
debugger.thread_analyser = ThreadingLogger()
if setup['save-asyncio']:
if IS_PY34_OR_GREATER:
debugger.asyncio_analyser = AsyncioLogger()
debugger.asyncio_analyser = AsyncioLogger()
apply_debugger_options(setup)

View file

@ -4,7 +4,7 @@ from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
from _pydev_imps._pydev_saved_modules import threading
from _pydevd_bundle import pydevd_xml
from _pydevd_bundle.pydevd_constants import GlobalDebuggerHolder
from _pydevd_bundle.pydevd_constants import get_thread_id, IS_PY3K
from _pydevd_bundle.pydevd_constants import get_thread_id
from _pydevd_bundle.pydevd_net_command import NetCommand
from pydevd_concurrency_analyser.pydevd_thread_wrappers import ObjectWrapper, wrap_attr
import pydevd_file_utils
@ -13,10 +13,7 @@ import sys
file_system_encoding = getfilesystemencoding()
try:
from urllib import quote
except:
from urllib.parse import quote # @UnresolvedImport
from urllib.parse import quote
threadingCurrentThread = threading.current_thread
@ -69,7 +66,7 @@ def get_text_list_for_frame(frame):
cmdTextList.append(variables)
cmdTextList.append("</frame>")
curFrame = curFrame.f_back
except :
except:
pydev_log.exception()
return cmdTextList
@ -183,7 +180,7 @@ class ThreadingLogger:
my_back = frame.f_back.f_back
my_thread_id = get_thread_id(my_self_obj)
send_massage = True
if IS_PY3K and hasattr(my_self_obj, "_pydev_join_called"):
if hasattr(my_self_obj, "_pydev_join_called"):
send_massage = False
# we can't detect stop after join in Python 2 yet
if send_massage:

View file

@ -42,7 +42,7 @@ r'''
'''
from _pydev_bundle import pydev_log
from _pydevd_bundle.pydevd_constants import IS_PY2, IS_PY3K, DebugInfoHolder, IS_WINDOWS, IS_JYTHON, \
from _pydevd_bundle.pydevd_constants import DebugInfoHolder, IS_WINDOWS, IS_JYTHON, \
DISABLE_FILE_VALIDATION
from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
from _pydevd_bundle.pydevd_comm_constants import file_system_encoding, filesystem_encoding_is_utf8
@ -140,27 +140,19 @@ if sys.platform == 'win32':
def _convert_to_long_pathname(filename):
buf = ctypes.create_unicode_buffer(MAX_PATH)
if IS_PY2 and isinstance(filename, str):
filename = filename.decode(getfilesystemencoding())
rv = GetLongPathName(filename, buf, MAX_PATH)
if rv != 0 and rv <= MAX_PATH:
filename = buf.value
if IS_PY2:
filename = filename.encode(getfilesystemencoding())
return filename
def _convert_to_short_pathname(filename):
buf = ctypes.create_unicode_buffer(MAX_PATH)
if IS_PY2 and isinstance(filename, str):
filename = filename.decode(getfilesystemencoding())
rv = GetShortPathName(filename, buf, MAX_PATH)
if rv != 0 and rv <= MAX_PATH:
filename = buf.value
if IS_PY2:
filename = filename.encode(getfilesystemencoding())
return filename
# Note that we have a cache for previous list dirs... the only case where this may be an
@ -202,9 +194,6 @@ if sys.platform == 'win32':
# consistently (there are settings to disable it on Windows).
# So, using approach which resolves by listing the dir.
if IS_PY2 and isinstance(filename, unicode): # noqa
filename = filename.encode(getfilesystemencoding())
if '~' in filename:
filename = convert_to_long_pathname(filename)
@ -257,8 +246,6 @@ elif IS_JYTHON and IS_WINDOWS:
from java.io import File # noqa
f = File(filename)
ret = f.getCanonicalPath()
if IS_PY2 and not isinstance(ret, str):
return ret.encode(getfilesystemencoding())
return ret
if IS_JYTHON:
@ -574,18 +561,8 @@ except:
def _path_to_expected_str(filename):
if IS_PY2:
if not filesystem_encoding_is_utf8 and hasattr(filename, "decode"):
# filename_in_utf8 is a byte string encoded using the file system encoding
# convert it to utf8
filename = filename.decode(file_system_encoding)
if not isinstance(filename, bytes):
filename = filename.encode('utf-8')
else: # py3
if isinstance(filename, bytes):
filename = filename.decode(file_system_encoding)
if isinstance(filename, bytes):
filename = filename.decode(file_system_encoding)
return filename
@ -691,12 +668,6 @@ def setup_client_server_paths(paths):
# Apply normcase to the existing paths to follow the os preferences.
for i, (path0, path1) in enumerate(paths):
if IS_PY2:
if isinstance(path0, unicode): # noqa
path0 = path0.encode(sys.getfilesystemencoding())
if isinstance(path1, unicode): # noqa
path1 = path1.encode(sys.getfilesystemencoding())
force_only_slash = path0.endswith(('/', '\\')) and path1.endswith(('/', '\\'))
if not force_only_slash:
@ -902,10 +873,7 @@ def get_abs_path_real_path_and_base_from_frame(frame, NORM_PATHS_AND_BASE_CONTAI
def get_fullname(mod_name):
if IS_PY3K:
import pkgutil
else:
from _pydev_imps import _pydev_pkgutil_old as pkgutil
import pkgutil
try:
loader = pkgutil.get_loader(mod_name)
except:

View file

@ -2,7 +2,7 @@ import inspect
from _pydev_bundle import pydev_log
from _pydevd_bundle.pydevd_comm import CMD_SET_BREAK, CMD_ADD_EXCEPTION_BREAK
from _pydevd_bundle.pydevd_constants import STATE_SUSPEND, dict_iter_items, DJANGO_SUSPEND, IS_PY2, \
from _pydevd_bundle.pydevd_constants import STATE_SUSPEND, DJANGO_SUSPEND, \
DebugInfoHolder
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, FCode, just_raised, ignore_exception_trace
from pydevd_file_utils import canonical_normalized_path, absolute_path
@ -241,10 +241,7 @@ def _find_django_render_frame(frame):
def _read_file(filename):
# type: (str) -> str
if IS_PY2:
f = open(filename, 'r')
else:
f = open(filename, 'r', encoding='utf-8', errors='replace')
f = open(filename, 'r', encoding='utf-8', errors='replace')
s = f.read()
f.close()
return s
@ -292,9 +289,6 @@ def _get_source_django_18_or_lower(frame):
def _convert_to_str(s):
if IS_PY2:
if isinstance(s, unicode):
s = s.encode('utf-8')
return s
@ -461,7 +455,7 @@ def has_exception_breaks(plugin):
def has_line_breaks(plugin):
for _canonical_normalized_filename, breakpoints in dict_iter_items(plugin.main_debugger.django_breakpoints):
for _canonical_normalized_filename, breakpoints in plugin.main_debugger.django_breakpoints.items():
if len(breakpoints) > 0:
return True
return False

View file

@ -1,5 +1,4 @@
from _pydevd_bundle.pydevd_constants import STATE_SUSPEND, dict_iter_items, dict_keys, JINJA2_SUSPEND, \
IS_PY2
from _pydevd_bundle.pydevd_constants import STATE_SUSPEND, JINJA2_SUSPEND
from _pydevd_bundle.pydevd_comm import CMD_SET_BREAK, CMD_ADD_EXCEPTION_BREAK
from pydevd_file_utils import canonical_normalized_path
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, FCode
@ -294,9 +293,6 @@ def _get_jinja2_template_line(frame):
def _convert_to_str(s):
if IS_PY2:
if isinstance(s, unicode):
s = s.encode('utf-8', 'replace')
return s
@ -318,7 +314,7 @@ def has_exception_breaks(plugin):
def has_line_breaks(plugin):
for _canonical_normalized_filename, breakpoints in dict_iter_items(plugin.main_debugger.jinja2_breakpoints):
for _canonical_normalized_filename, breakpoints in plugin.main_debugger.jinja2_breakpoints.items():
if len(breakpoints) > 0:
return True
return False
@ -336,20 +332,14 @@ def can_skip(plugin, pydb, frame):
if pydb.jinja2_exception_break:
name = frame.f_code.co_name
if IS_PY2:
if name == 'fail':
module_name = frame.f_globals.get('__name__', '')
if module_name == 'jinja2.parser':
return False
else:
# errors in compile time
if name in ('template', 'top-level template code', '<module>') or name.startswith('block '):
f_back = frame.f_back
module_name = ''
if f_back is not None:
module_name = f_back.f_globals.get('__name__', '')
if module_name.startswith('jinja2.'):
return False
# errors in compile time
if name in ('template', 'top-level template code', '<module>') or name.startswith('block '):
f_back = frame.f_back
module_name = ''
if f_back is not None:
module_name = f_back.f_globals.get('__name__', '')
if module_name.startswith('jinja2.'):
return False
return True
@ -484,7 +474,7 @@ def exception_break(plugin, pydb, pydb_frame, frame, args, arg):
thread = args[3]
exception, value, trace = arg
if pydb.jinja2_exception_break and exception is not None:
exception_type = dict_keys(pydb.jinja2_exception_break)[0]
exception_type = list(pydb.jinja2_exception_break.keys())[0]
if exception.__name__ in ('UndefinedError', 'TemplateNotFound', 'TemplatesNotFound'):
# errors in rendering
render_frame = _find_jinja2_render_frame(frame)
@ -499,35 +489,18 @@ def exception_break(plugin, pydb, pydb_frame, frame, args, arg):
elif exception.__name__ in ('TemplateSyntaxError', 'TemplateAssertionError'):
name = frame.f_code.co_name
if IS_PY2:
if name == 'fail':
module_name = frame.f_globals.get('__name__', '')
if module_name == 'jinja2.parser':
filename = value.filename
lineno = value.lineno
# errors in compile time
if name in ('template', 'top-level template code', '<module>') or name.startswith('block '):
syntax_error_frame = Jinja2TemplateSyntaxErrorFrame(
frame, exception.__name__, filename, lineno, {'name': value.name, 'exception': value})
f_back = frame.f_back
if f_back is not None:
module_name = f_back.f_globals.get('__name__', '')
pydb_frame.set_suspend(thread, CMD_ADD_EXCEPTION_BREAK)
add_exception_to_frame(syntax_error_frame, (exception, value, trace))
thread.additional_info.suspend_type = JINJA2_SUSPEND
thread.additional_info.pydev_message = str(exception_type)
return True, syntax_error_frame
else:
# errors in compile time
if name in ('template', 'top-level template code', '<module>') or name.startswith('block '):
f_back = frame.f_back
if f_back is not None:
module_name = f_back.f_globals.get('__name__', '')
if module_name.startswith('jinja2.'):
# Jinja2 translates exception info and creates fake frame on his own
pydb_frame.set_suspend(thread, CMD_ADD_EXCEPTION_BREAK)
add_exception_to_frame(frame, (exception, value, trace))
thread.additional_info.suspend_type = JINJA2_SUSPEND
thread.additional_info.pydev_message = str(exception_type)
return True, frame
if module_name.startswith('jinja2.'):
# Jinja2 translates exception info and creates fake frame on his own
pydb_frame.set_suspend(thread, CMD_ADD_EXCEPTION_BREAK)
add_exception_to_frame(frame, (exception, value, trace))
thread.additional_info.suspend_type = JINJA2_SUSPEND
thread.additional_info.pydev_message = str(exception_type)
return True, frame
return None

View file

@ -1,5 +1,4 @@
from _pydevd_bundle.pydevd_breakpoints import LineBreakpoint
from _pydevd_bundle.pydevd_constants import dict_items
from _pydevd_bundle.pydevd_api import PyDevdAPI
import bisect
from _pydev_bundle import pydev_log
@ -71,7 +70,7 @@ class ValidationInfo(object):
self._verify_breakpoints_with_lines_collected(py_db, canonical_normalized_filename, template_breakpoints_for_file, valid_lines_frozenset, sorted_lines)
def _verify_breakpoints_with_lines_collected(self, py_db, canonical_normalized_filename, template_breakpoints_for_file, valid_lines_frozenset, sorted_lines):
for line, template_bp in dict_items(template_breakpoints_for_file): # Note: iterate in a copy (we may mutate it).
for line, template_bp in list(template_breakpoints_for_file.items()): # Note: iterate in a copy (we may mutate it).
if template_bp.verified_cache_key != valid_lines_frozenset:
template_bp.verified_cache_key = valid_lines_frozenset
valid = line in valid_lines_frozenset

View file

@ -1,24 +1,17 @@
from _pydevd_bundle.pydevd_constants import get_frame, IS_CPYTHON, IS_64BIT_PROCESS, IS_WINDOWS, \
IS_LINUX, IS_MAC, IS_PY2, DebugInfoHolder, LOAD_NATIVE_LIB_FLAG, \
ENV_FALSE_LOWER_VALUES, GlobalDebuggerHolder, ForkSafeLock
IS_LINUX, IS_MAC, DebugInfoHolder, LOAD_NATIVE_LIB_FLAG, \
ENV_FALSE_LOWER_VALUES, ForkSafeLock
from _pydev_imps._pydev_saved_modules import thread, threading
from _pydev_bundle import pydev_log, pydev_monkey
from os.path import os
import os.path
import platform
try:
import ctypes
except ImportError:
ctypes = None
try:
import cStringIO as StringIO # may not always be available @UnusedImport
except:
try:
import StringIO # @Reimport
except:
import io as StringIO
import sys # @Reimport
from io import StringIO
import sys
import traceback
_original_settrace = sys.settrace
@ -35,7 +28,7 @@ class TracingFunctionHolder:
def get_exception_traceback_str():
exc_info = sys.exc_info()
s = StringIO.StringIO()
s = StringIO()
traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], file=s)
return s.getvalue()
@ -47,7 +40,7 @@ def _get_stack_str(frame):
'\nto see how to restore the debug tracing back correctly.\n'
if TracingFunctionHolder._traceback_limit:
s = StringIO.StringIO()
s = StringIO()
s.write('Call Location:\n')
traceback.print_stack(f=frame, limit=TracingFunctionHolder._traceback_limit, file=s)
msg = msg + s.getvalue()
@ -314,10 +307,7 @@ def set_trace_to_threads(tracing_func, thread_idents=None, create_dummy_thread=T
def _set_ident(self):
# Note: Hack to set the thread ident that we want.
if IS_PY2:
self._Thread__ident = thread_ident
else:
self._ident = thread_ident
self._ident = thread_ident
t = _DummyThread()
# Reset to the base class (don't expose our own version of the class).

View file

@ -4,7 +4,6 @@ Entry point module (keep at root):
Used to run with tests with unittest/pytest/nose.
'''
import os
try:
@ -12,6 +11,7 @@ try:
except:
xrange = range
def main():
import sys
@ -36,9 +36,12 @@ def main():
try:
# Convert to the case stored in the filesystem
import win32api
def get_with_filesystem_case(f):
return win32api.GetLongPathName(win32api.GetShortPathName(f))
except:
def get_with_filesystem_case(f):
return f
@ -106,7 +109,6 @@ def main():
# --tests = Constants.ATTR_UNITTEST_TESTS
# --config-file = Constants.ATTR_UNITTEST_CONFIGURATION_FILE
# The only thing actually handled here are the tests that we want to run, which we'll
# handle and pass as what the test framework expects.
@ -151,7 +153,6 @@ def main():
argv = other_test_framework_params + files_or_dirs
if test_framework == NOSE_FRAMEWORK:
# Nose usage: http://somethingaboutorange.com/mrl/projects/nose/0.11.2/usage.html
# show_stdout_option = ['-s']
@ -219,7 +220,7 @@ def main():
# Workaround bug in py.test: if we pass the full path it ends up importing conftest
# more than once (so, always work with relative paths).
if os.path.isfile(arg) or os.path.isdir(arg):
# Args must be passed with the proper case in the filesystem (otherwise
# python itself may not recognize it).
arg = get_with_filesystem_case(arg)
@ -251,8 +252,7 @@ def main():
# Set what should be skipped in the plugin through an environment variable
s = base64.b64encode(zlib.compress(pickle.dumps(py_test_accept_filter)))
if pydevd_constants.IS_PY3K:
s = s.decode('ascii') # Must be str in py3.
s = s.decode('ascii') # Must be str in py3.
os.environ['PYDEV_PYTEST_SKIP'] = s
# Identifies the main pid (i.e.: if it's not the main pid it has to connect back to the
@ -286,6 +286,7 @@ if __name__ == '__main__':
import traceback
class DumpThreads(threading.Thread):
def run(self):
time.sleep(10)
@ -316,7 +317,6 @@ if __name__ == '__main__':
stack_trace.append('\n=============================== END Thread Dump ===============================')
sys.stderr.write('\n'.join(stack_trace))
dump_current_frames_thread = DumpThreads()
dump_current_frames_thread.daemon = True # Daemon so that this thread doesn't halt it!
dump_current_frames_thread.start()

View file

@ -1,283 +0,0 @@
import os.path
import inspect
import sys
# completion types.
TYPE_IMPORT = '0'
TYPE_CLASS = '1'
TYPE_FUNCTION = '2'
TYPE_ATTR = '3'
TYPE_BUILTIN = '4'
TYPE_PARAM = '5'
def _imp(name, log=None):
try:
return __import__(name)
except:
if '.' in name:
sub = name[0:name.rfind('.')]
if log is not None:
log.AddContent('Unable to import', name, 'trying with', sub)
# log.AddContent('PYTHONPATH:')
# log.AddContent('\n'.join(sorted(sys.path)))
log.AddException()
return _imp(sub, log)
else:
s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path)
if log is not None:
log.AddContent(s)
log.AddException()
raise ImportError(s)
IS_IPY = False
if sys.platform == 'cli':
IS_IPY = True
_old_imp = _imp
def _imp(name, log=None):
# We must add a reference in clr for .Net
import clr # @UnresolvedImport
initial_name = name
while '.' in name:
try:
clr.AddReference(name)
break # If it worked, that's OK.
except:
name = name[0:name.rfind('.')]
else:
try:
clr.AddReference(name)
except:
pass # That's OK (not dot net module).
return _old_imp(initial_name, log)
def GetFile(mod):
f = None
try:
f = inspect.getsourcefile(mod) or inspect.getfile(mod)
except:
try:
f = getattr(mod, '__file__', None)
except:
pass
else:
if f and f.lower(f[-4:]) in ['.pyc', '.pyo']:
filename = f[:-4] + '.py'
if os.path.exists(filename):
f = filename
return f
def Find(name, log=None):
f = None
mod = _imp(name, log)
parent = mod
foundAs = ''
if inspect.ismodule(mod):
f = GetFile(mod)
components = name.split('.')
old_comp = None
for comp in components[1:]:
try:
# this happens in the following case:
# we have mx.DateTime.mxDateTime.mxDateTime.pyd
# but after importing it, mx.DateTime.mxDateTime shadows access to mxDateTime.pyd
mod = getattr(mod, comp)
except AttributeError:
if old_comp != comp:
raise
if inspect.ismodule(mod):
f = GetFile(mod)
else:
if len(foundAs) > 0:
foundAs = foundAs + '.'
foundAs = foundAs + comp
old_comp = comp
return f, mod, parent, foundAs
def GenerateTip(data, log=None):
data = data.replace('\n', '')
if data.endswith('.'):
data = data.rstrip('.')
f, mod, parent, foundAs = Find(data, log)
# print_ >> open('temp.txt', 'w'), f
tips = GenerateImportsTipForModule(mod)
return f, tips
def CheckChar(c):
if c == '-' or c == '.':
return '_'
return c
def GenerateImportsTipForModule(obj_to_complete, dirComps=None, getattr=getattr, filter=lambda name:True):
'''
@param obj_to_complete: the object from where we should get the completions
@param dirComps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter
@param getattr: the way to get a given object from the obj_to_complete (used for the completer)
@param filter: a callable that receives the name and decides if it should be appended or not to the results
@return: list of tuples, so that each tuple represents a completion with:
name, doc, args, type (from the TYPE_* constants)
'''
ret = []
if dirComps is None:
dirComps = dir(obj_to_complete)
if hasattr(obj_to_complete, '__dict__'):
dirComps.append('__dict__')
if hasattr(obj_to_complete, '__class__'):
dirComps.append('__class__')
getCompleteInfo = True
if len(dirComps) > 1000:
# ok, we don't want to let our users wait forever...
# no complete info for you...
getCompleteInfo = False
dontGetDocsOn = (float, int, str, tuple, list)
for d in dirComps:
if d is None:
continue
if not filter(d):
continue
args = ''
try:
obj = getattr(obj_to_complete, d)
except: # just ignore and get it without aditional info
ret.append((d, '', args, TYPE_BUILTIN))
else:
if getCompleteInfo:
retType = TYPE_BUILTIN
# check if we have to get docs
getDoc = True
for class_ in dontGetDocsOn:
if isinstance(obj, class_):
getDoc = False
break
doc = ''
if getDoc:
# no need to get this info... too many constants are defined and
# makes things much slower (passing all that through sockets takes quite some time)
try:
doc = inspect.getdoc(obj)
if doc is None:
doc = ''
except: # may happen on jython when checking java classes (so, just ignore it)
doc = ''
if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj):
try:
args, vargs, kwargs, defaults = inspect.getargspec(obj)
except:
args, vargs, kwargs, defaults = (('self',), None, None, None)
if defaults is not None:
start_defaults_at = len(args) - len(defaults)
r = ''
for i, a in enumerate(args):
if len(r) > 0:
r = r + ', '
r = r + str(a)
if defaults is not None and i >= start_defaults_at:
default = defaults[i - start_defaults_at]
r += '=' +str(default)
others = ''
if vargs:
others += '*' + vargs
if kwargs:
if others:
others+= ', '
others += '**' + kwargs
if others:
r+= ', '
args = '(%s%s)' % (r, others)
retType = TYPE_FUNCTION
elif inspect.isclass(obj):
retType = TYPE_CLASS
elif inspect.ismodule(obj):
retType = TYPE_IMPORT
else:
retType = TYPE_ATTR
# add token and doc to return - assure only strings.
ret.append((d, doc, args, retType))
else: # getCompleteInfo == False
if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj):
retType = TYPE_FUNCTION
elif inspect.isclass(obj):
retType = TYPE_CLASS
elif inspect.ismodule(obj):
retType = TYPE_IMPORT
else:
retType = TYPE_ATTR
# ok, no complete info, let's try to do this as fast and clean as possible
# so, no docs for this kind of information, only the signatures
ret.append((d, '', str(args), retType))
return ret
if __name__ == '__main__':
# To use when we have some object: i.e.: obj_to_complete=MyModel.objects
temp = '''
def %(method_name)s%(args)s:
"""
%(doc)s
"""
'''
for entry in GenerateImportsTipForModule(obj_to_complete):
import textwrap
doc = textwrap.dedent(entry[1])
lines = []
for line in doc.splitlines():
lines.append(' ' + line)
doc = '\n'.join(lines)
print temp % dict(method_name=entry[0], args=entry[2] or '(self)', doc=doc)

View file

@ -1,39 +0,0 @@
#!/usr/bin/python
'''
@author Radim Kubacki
'''
from _pydev_bundle import _pydev_imports_tipper
import traceback
import StringIO
import sys
import urllib
import pycompletionserver
#=======================================================================================================================
# GetImports
#=======================================================================================================================
def GetImports(module_name):
try:
processor = pycompletionserver.Processor()
data = urllib.unquote_plus(module_name)
def_file, completions = _pydev_imports_tipper.GenerateTip(data)
return processor.formatCompletionMessage(def_file, completions)
except:
s = StringIO.StringIO()
exc_info = sys.exc_info()
traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], limit=None, file=s)
err = s.getvalue()
pycompletionserver.dbg('Received error: ' + str(err), pycompletionserver.ERROR)
raise
#=======================================================================================================================
# main
#=======================================================================================================================
if __name__ == '__main__':
mod_name = sys.argv[1]
print(GetImports(mod_name))

View file

@ -5,17 +5,9 @@ import platform
TEST_CYTHON = os.getenv('PYDEVD_USE_CYTHON', None) == 'YES'
PYDEVD_TEST_VM = os.getenv('PYDEVD_TEST_VM', None)
IS_PY3K = sys.version_info[0] >= 3
IS_PY36_OR_GREATER = sys.version_info[0:2] >= (3, 6)
IS_CPYTHON = platform.python_implementation() == 'CPython'
IS_PY2 = False
if sys.version_info[0] == 2:
IS_PY2 = True
IS_PY26 = sys.version_info[:2] == (2, 6)
IS_PY27 = sys.version_info[:2] == (2, 7)
IS_PY34 = sys.version_info[:2] == (3, 4)
IS_PY36 = False
if sys.version_info[0] == 3 and sys.version_info[1] == 6:
IS_PY36 = True

View file

@ -8,7 +8,6 @@ from tests_python import debugger_unittest
from tests_python.debugger_unittest import (get_free_port, overrides, IS_CPYTHON, IS_JYTHON, IS_IRONPYTHON,
CMD_ADD_DJANGO_EXCEPTION_BREAK, CMD_REMOVE_DJANGO_EXCEPTION_BREAK,
CMD_ADD_EXCEPTION_BREAK, wait_for_condition, IS_PYPY)
from tests_python.debug_constants import IS_PY2
from _pydevd_bundle.pydevd_comm_constants import file_system_encoding
import sys
@ -246,11 +245,7 @@ def case_setup(tmpdir, debugger_runner_simple):
class CaseSetup(object):
check_non_ascii = False
if IS_PY2 and IS_WINDOWS:
# Py2 has some issues converting the non latin1 chars to bytes in windows.
NON_ASCII_CHARS = u'áéíóú'
else:
NON_ASCII_CHARS = u'áéíóú汉字'
NON_ASCII_CHARS = u'áéíóú汉字'
@contextmanager
def test_file(
@ -276,9 +271,6 @@ def case_setup(tmpdir, debugger_runner_simple):
shutil.copyfile(filename, new_filename)
filename = new_filename
if IS_PY2:
filename = filename.encode(file_system_encoding)
WriterThread.TEST_FILE = filename
for key, value in kwargs.items():
assert hasattr(WriterThread, key)

View file

@ -291,8 +291,7 @@ class ReaderThread(threading.Thread):
if SHOW_WRITES_AND_READS:
show_line = line
if IS_PY3K:
show_line = line.decode('utf-8')
show_line = line.decode('utf-8')
print('%s Received %s' % (self.name, show_line,))
@ -311,8 +310,7 @@ class ReaderThread(threading.Thread):
return # Finished communication.
msg = json_contents
if IS_PY3K:
msg = msg.decode('utf-8')
msg = msg.decode('utf-8')
print('Test Reader Thread Received %s' % (msg,))
self._queue.put(msg)
@ -329,9 +327,8 @@ class ReaderThread(threading.Thread):
line = line[:-1]
msg = line
if IS_PY3K:
msg = msg.decode('utf-8')
print('Test Reader Thread Received %s' % (msg,))
msg = msg.decode('utf-8')
print('Test Reader Thread Received %s' % (msg,))
self._queue.put(msg)
except:
@ -369,8 +366,7 @@ def read_process(stream, buffer, debug_stream, stream_name, finish):
if not line:
break
if IS_PY3K:
line = line.decode('utf-8', errors='replace')
line = line.decode('utf-8', errors='replace')
if SHOW_STDOUT:
debug_stream.write('%s: %s' % (stream_name, line,))
@ -742,14 +738,6 @@ class AbstractWriterThread(threading.Thread):
if line.strip().startswith('at '):
return True
if IS_PY26:
# Sometimes in the ci there's an unhandled exception which doesn't have a stack trace
# (apparently this happens when a daemon thread dies during process shutdown).
# This was only reproducible on the ci on Python 2.6, so, ignoring that output on Python 2.6 only.
for expected in (
'Unhandled exception in thread started by <_pydev_bundle.pydev_monkey._NewThreadStartupWithTrace'):
if expected in line:
return True
return False
def additional_output_checks(self, stdout, stderr):
@ -847,8 +835,7 @@ class AbstractWriterThread(threading.Thread):
print('%s.sock not available when sending: %s' % (self, msg))
return
if IS_PY3K:
msg = msg.encode('utf-8')
msg = msg.encode('utf-8')
self.sock.send(msg)
@ -1440,8 +1427,7 @@ class AbstractWriterThread(threading.Thread):
try:
stream = urlopen(full_url)
contents = stream.read()
if IS_PY3K:
contents = contents.decode('utf-8')
contents = contents.decode('utf-8')
self.contents = contents
break
except IOError:

View file

@ -1,13 +1,9 @@
from _pydevd_bundle.pydevd_code_to_source import code_obj_to_source
import pytest
from tests_python.debug_constants import IS_PY26
# i.e.: Skip these tests (this is a work in progress / proof of concept / not ready to be used).
pytestmark = pytest.mark.skip
if IS_PY26: # pytestmark not available in older versions of pytest.
pytest.skip('Work in progress')
def check(obtained, expected, strip_return_none=True):
keepends = False

View file

@ -7,9 +7,7 @@ import traceback
from _pydevd_bundle.pydevd_collect_bytecode_info import collect_try_except_info, \
collect_return_info, code_to_bytecode_representation
from tests_python.debugger_unittest import IS_CPYTHON, IS_PYPY
from tests_python.debug_constants import IS_PY2, IS_PY3K
from _pydevd_bundle.pydevd_constants import IS_PY38_OR_GREATER, IS_JYTHON, IS_PY36_OR_GREATER, \
IS_PY35_OR_GREATER
from _pydevd_bundle.pydevd_constants import IS_PY38_OR_GREATER, IS_JYTHON
def _method_call_with_error():
@ -386,7 +384,6 @@ def test_collect_try_except_info_multiple_except(exc_verifier):
exc_verifier.check(try_except_with, '[{try:1 except 3 end block 8}]')
@pytest.mark.skipif(not IS_PY35_OR_GREATER, reason='Python 3.5 onwards required for async for/async def')
def test_collect_try_except_info_async_for():
# Not valid on Python 2.
@ -460,9 +457,7 @@ def test_collect_return_info():
assert str(collect_return_info(method5.__code__, use_func_first_line=True)) == \
'[{return: 1}]' if IS_PY38_OR_GREATER else '[{return: 3}]'
if not IS_PY2:
# return in generator is not valid for python 2.
code = '''
code = '''
def method():
if a:
yield 1
@ -472,10 +467,10 @@ def method():
pass
'''
scope = {}
exec(code, scope)
assert str(collect_return_info(scope['method'].__code__, use_func_first_line=True)) == \
'[{return: 4}, {return: 6}]'
scope = {}
exec(code, scope)
assert str(collect_return_info(scope['method'].__code__, use_func_first_line=True)) == \
'[{return: 4}, {return: 6}]'
@pytest.mark.skipif(IS_JYTHON, reason='Jython does not have bytecode support.')
@ -624,10 +619,10 @@ def test_simple_code_to_bytecode_cls_method():
def test_simple_code_to_bytecode_repr_unicode():
def method4():
return 'áéíóú'
return 'áéíóú'
new_repr = code_to_bytecode_representation(method4.__code__, use_func_first_line=True)
assert repr('áéíóú') in new_repr
assert repr('áéíóú') in new_repr
def _create_entry(instruction):

View file

@ -12,12 +12,8 @@ from _pydev_bundle.pydev_imports import _queue as queue
from _pydev_bundle.pydev_imports import SimpleXMLRPCServer
import time
import socket
from tests_python.debug_constants import IS_PY2
if IS_PY2:
builtin_qualifier = "__builtin__"
else:
builtin_qualifier = "builtins"
builtin_qualifier = "builtins"
@pytest.fixture

View file

@ -1,6 +1,6 @@
# coding: utf-8
import os.path
from _pydevd_bundle.pydevd_constants import IS_WINDOWS, IS_PY2
from _pydevd_bundle.pydevd_constants import IS_WINDOWS
from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
import io
from _pydev_bundle.pydev_log import log_context
@ -54,15 +54,6 @@ def test_convert_utilities(tmpdir):
assert pydevd_file_utils._listdir_cache[os.path.dirname(normalized).lower()] == ['Test_Convert_Utilities']
assert pydevd_file_utils._listdir_cache[(os.path.dirname(normalized).lower(), 'Test_Convert_Utilities'.lower())] == real_case
if IS_PY2:
# Test with unicode in python 2 too.
real_case = pydevd_file_utils.get_path_with_real_case(normalized.decode(
getfilesystemencoding()))
assert isinstance(real_case, str) # bytes on py2, unicode on py3
# Note test_dir itself cannot be compared with because pytest may
# have passed the case normalized.
assert real_case.endswith("Test_Convert_Utilities")
# Check that it works with a shortened path.
shortened = pydevd_file_utils.convert_to_short_pathname(normalized)
assert '~' in shortened

View file

@ -26,7 +26,6 @@ import json
import pydevd_file_utils
import subprocess
import threading
from tests_python.debug_constants import IS_PY26
from _pydev_bundle import pydev_log
try:
from urllib import unquote
@ -39,15 +38,9 @@ pytest_plugins = [
str('tests_python.debugger_fixtures'),
]
try:
xrange
except:
xrange = range
xrange = range
if IS_PY2:
builtin_qualifier = "__builtin__"
else:
builtin_qualifier = "builtins"
builtin_qualifier = "builtins"
@pytest.mark.skipif(not IS_CPYTHON, reason='Test needs gc.get_referrers/reference counting to really check anything.')
@ -1360,10 +1353,7 @@ def test_case_handled_and_unhandled_exception_generator(case_setup, target_file,
if 'generator' in target_file:
expected_frame_names = ['<genexpr>', 'f', '<module>']
else:
if IS_PY27 or IS_PY26:
expected_frame_names = ['f', '<module>']
else:
expected_frame_names = ['<listcomp>', 'f', '<module>']
expected_frame_names = ['<listcomp>', 'f', '<module>']
writer.write_get_current_exception(hit.thread_id)
msg = writer.wait_for_message(accept_message=lambda msg:'exc_type="' in msg and 'exc_desc="' in msg, unquote_msg=False)
@ -1374,15 +1364,10 @@ def test_case_handled_and_unhandled_exception_generator(case_setup, target_file,
writer.write_run_thread(hit.thread_id)
if not unhandled:
if (IS_PY26 or IS_PY27) and 'listcomp' in target_file:
expected_lines = [
writer.get_line_index_with_content('# call exc'),
]
else:
expected_lines = [
writer.get_line_index_with_content('# exc line'),
writer.get_line_index_with_content('# call exc'),
]
expected_lines = [
writer.get_line_index_with_content('# exc line'),
writer.get_line_index_with_content('# call exc'),
]
for expected_line in expected_lines:
hit = writer.wait_for_breakpoint_hit(REASON_CAUGHT_EXCEPTION)
@ -1965,7 +1950,7 @@ def test_case_settrace(case_setup):
writer.finished_ok = True
@pytest.mark.skipif(True or IS_PY26 or IS_JYTHON, reason='This is *very* flaky. Scapy only supports 2.7 onwards, not available for jython.')
@pytest.mark.skipif(True, reason='This is *very* flaky.')
def test_case_scapy(case_setup):
with case_setup.test_file('_debugger_case_scapy.py') as writer:
writer.FORCE_KILL_PROCESS_WHEN_FINISHED_OK = True
@ -2433,14 +2418,11 @@ def test_py_37_breakpoint(case_setup, filename):
def _get_generator_cases():
if IS_PY2:
return ('_debugger_case_generator_py2.py',)
else:
# On py3 we should check both versions.
return (
'_debugger_case_generator_py2.py',
'_debugger_case_generator_py3.py',
)
# On py3 we should check both versions.
return (
'_debugger_case_generator_py2.py',
'_debugger_case_generator_py3.py',
)
@pytest.mark.parametrize("filename", _get_generator_cases())
@ -2653,8 +2635,7 @@ def test_multiprocessing_with_stopped_breakpoints(case_setup_multiprocessing, co
secondary_process_thread_communication.start()
ok = listening_event.wait(timeout=10)
if not IS_PY26:
assert ok
assert ok
writer.write_make_initial_run()
hit2 = writer.wait_for_breakpoint_hit() # Breaks in thread.
writer.write_step_over(hit2.thread_id)
@ -3083,7 +3064,6 @@ def test_trace_dispatch_correct(case_setup):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY26, reason='Failing on Python 2.6 on travis (needs investigation).')
def test_case_single_notification_on_step(case_setup):
from tests_python.debugger_unittest import REASON_STEP_INTO
with case_setup.test_file('_debugger_case_import_main.py') as writer:
@ -3509,7 +3489,6 @@ def test_step_return_my_code(case_setup):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY2, reason='Python 3 onwards required.')
def test_smart_step_into_case1(case_setup):
with case_setup.test_file('_debugger_case_smart_step_into.py') as writer:
line = writer.get_line_index_with_content('break here')
@ -3532,7 +3511,6 @@ def test_smart_step_into_case1(case_setup):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY2, reason='Python 3 onwards required.')
def test_smart_step_into_case2(case_setup):
with case_setup.test_file('_debugger_case_smart_step_into2.py') as writer:
line = writer.get_line_index_with_content('break here')
@ -3561,7 +3539,6 @@ def test_smart_step_into_case2(case_setup):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY2, reason='Python 3 onwards required.')
def test_smart_step_into_case3(case_setup):
with case_setup.test_file('_debugger_case_smart_step_into3.py') as writer:
line = writer.get_line_index_with_content('break here')
@ -3915,12 +3892,10 @@ def test_matplotlib_activation(case_setup):
_GENERATOR_FILES = [
'_debugger_case_generator3.py',
'_debugger_case_generator.py',
'_debugger_case_generator2.py',
]
if not IS_PY2:
_GENERATOR_FILES.append('_debugger_case_generator.py')
_GENERATOR_FILES.append('_debugger_case_generator2.py')
@pytest.mark.parametrize('target_filename', _GENERATOR_FILES)
@pytest.mark.skipif(IS_JYTHON, reason='We do not detect generator returns on Jython.')
@ -4326,8 +4301,7 @@ def test_frame_eval_mode_corner_case_many(case_setup, break_name):
writer.finished_ok = True
if IS_PY3K:
check_shadowed = [
check_shadowed = [
(
u'''
if __name__ == '__main__':
@ -4347,30 +4321,7 @@ if __name__ == '__main__':
'queue.py',
u'raise AssertionError("error on import")'
)
]
else:
check_shadowed = [
(
u'''
if __name__ == '__main__':
import Queue
print(Queue)
''',
'Queue.py',
u'shadowed = True\n'
),
(
u'''
if __name__ == '__main__':
import Queue
print(Queue)
''',
'Queue.py',
u'raise AssertionError("error on import")'
)
]
]
@pytest.mark.parametrize('module_name_and_content', check_shadowed)

View file

@ -21,8 +21,8 @@ from _pydevd_bundle.pydevd_constants import (int_types, IS_64BIT_PROCESS,
PY_VERSION_STR, PY_IMPL_VERSION_STR, PY_IMPL_NAME, IS_PY36_OR_GREATER,
IS_PYPY, GENERATED_LEN_ATTR_NAME, IS_WINDOWS, IS_LINUX, IS_MAC, IS_PY38_OR_GREATER)
from tests_python import debugger_unittest
from tests_python.debug_constants import TEST_CHERRYPY, IS_PY2, TEST_DJANGO, TEST_FLASK, IS_PY26, \
IS_PY27, IS_CPYTHON, TEST_GEVENT, TEST_CYTHON
from tests_python.debug_constants import TEST_CHERRYPY, TEST_DJANGO, TEST_FLASK, \
IS_CPYTHON, TEST_GEVENT, TEST_CYTHON
from tests_python.debugger_unittest import (IS_JYTHON, IS_APPVEYOR, overrides,
get_free_port, wait_for_condition)
from _pydevd_bundle.pydevd_utils import DAPGrouper
@ -136,11 +136,6 @@ class JsonFacade(object):
json_hit = self.get_stack_as_json_hit(stopped_event.body.threadId)
if file is not None:
path = json_hit.stack_trace_response.body.stackFrames[0]['source']['path']
if IS_PY2:
if isinstance(file, bytes):
file = file.decode('utf-8')
if isinstance(path, bytes):
path = path.decode('utf-8')
if not path.endswith(file):
# pytest may give a lowercase tempdir, so, also check with
@ -592,7 +587,6 @@ def test_case_json_logpoint_and_step(case_setup):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY26, reason='Failing on Python 2.6')
def test_case_json_hit_count_and_step(case_setup):
with case_setup.test_file('_debugger_case_hit_count.py') as writer:
json_facade = JsonFacade(writer)
@ -756,7 +750,6 @@ def _check_current_line(json_hit, current_line):
rep))
@pytest.mark.skipif(IS_PY26, reason='Not ok on Python 2.6')
@pytest.mark.parametrize('stop', [False, True])
def test_case_user_unhandled_exception(case_setup, stop):
@ -851,7 +844,6 @@ def test_case_user_unhandled_exception_coroutine(case_setup, stop):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY26, reason='Not ok on Python 2.6')
def test_case_user_unhandled_exception_dont_stop(case_setup):
with case_setup.test_file(
@ -872,7 +864,6 @@ def test_case_user_unhandled_exception_dont_stop(case_setup):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY26, reason='Not ok on Python 2.6')
def test_case_user_unhandled_exception_stop_on_yield(case_setup, pyfile):
@pyfile
@ -1097,10 +1088,7 @@ def test_case_unhandled_exception_generator(case_setup, target_file):
if 'generator' in target_file:
expected_frame_names = ['<genexpr>', 'f', '<module>']
else:
if IS_PY27 or IS_PY26:
expected_frame_names = ['f', '<module>']
else:
expected_frame_names = ['<listcomp>', 'f', '<module>']
expected_frame_names = ['<listcomp>', 'f', '<module>']
frame_names = [f['name'] for f in frames]
assert frame_names == expected_frame_names
@ -1584,7 +1572,6 @@ def test_modules(case_setup):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY26, reason='Python 2.6 does not have an ordered dict')
def test_dict_ordered(case_setup):
with case_setup.test_file('_debugger_case_odict.py') as writer:
json_facade = JsonFacade(writer)
@ -1634,23 +1621,13 @@ def test_stack_and_variables_dict(case_setup):
assert isinstance(dict_variable_reference, int_types)
# : :type variables_response: VariablesResponse
if IS_PY2:
print(repr(variables_response.body.variables[-1]))
expected_unicode = {
u'name': u'\u16a0',
u'value': u"u'\\u16a1'",
u'type': u'unicode',
u'presentationHint': {u'attributes': [u'rawString']},
u'evaluateName': u'\u16a0',
}
else:
expected_unicode = {
'name': u'\u16A0',
'value': "'\u16a1'",
'type': 'str',
'presentationHint': {'attributes': ['rawString']},
'evaluateName': u'\u16A0',
}
expected_unicode = {
'name': u'\u16A0',
'value': "'\u16a1'",
'type': 'str',
'presentationHint': {'attributes': ['rawString']},
'evaluateName': u'\u16A0',
}
assert variables_response.body.variables == [
{'name': 'variable_for_test_1', 'value': '10', 'type': 'int', 'evaluateName': 'variable_for_test_1'},
{'name': 'variable_for_test_2', 'value': '20', 'type': 'int', 'evaluateName': 'variable_for_test_2'},
@ -1687,13 +1664,9 @@ def test_variables_with_same_name(case_setup):
assert isinstance(dict_variable_reference, int_types)
# : :type variables_response: VariablesResponse
if not IS_PY2:
assert variables_response.body.variables == [
{'name': 'td', 'value': "{foo: 'bar', gad: 'zooks', foo: 'bur'}", 'type': 'dict', 'evaluateName': 'td'}
]
else:
# The value may change the representation on Python 2 as dictionaries don't keep the insertion order.
assert len(variables_response.body.variables) == 1
assert variables_response.body.variables == [
{'name': 'td', 'value': "{foo: 'bar', gad: 'zooks', foo: 'bur'}", 'type': 'dict', 'evaluateName': 'td'}
]
dict_variables_response = json_facade.get_variables_response(dict_variable_reference)
# Note that we don't have the evaluateName because it's not possible to create a key
@ -1780,8 +1753,7 @@ def test_hasattr_failure(case_setup):
evaluate_response = json_facade.evaluate('obj', json_hit.frame_id, context='hover')
evaluate_response_body = evaluate_response.body.to_dict()
if not IS_PY2:
assert evaluate_response_body['result'] == 'An exception was raised: RuntimeError()'
assert evaluate_response_body['result'] == 'An exception was raised: RuntimeError()'
json_facade.evaluate('not_there', json_hit.frame_id, context='hover', success=False)
json_facade.evaluate('not_there', json_hit.frame_id, context='watch', success=False)
@ -2057,7 +2029,6 @@ def test_evaluate_block_clipboard(case_setup, pyfile):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY26, reason='__dir__ not customizable on Python 2.6')
def test_exception_on_dir(case_setup):
with case_setup.test_file('_debugger_case_dir_exception.py') as writer:
json_facade = JsonFacade(writer)
@ -2086,9 +2057,6 @@ def test_exception_on_dir(case_setup):
])
@pytest.mark.parametrize('asyncio', [True, False])
def test_return_value_regular(case_setup, scenario, asyncio):
if IS_PY2 and asyncio:
raise pytest.skip('asyncio not available for python 2.')
with case_setup.test_file('_debugger_case_return_value.py' if not asyncio else '_debugger_case_return_value_asyncio.py') as writer:
json_facade = JsonFacade(writer)
@ -2147,10 +2115,7 @@ def test_stack_and_variables_set_and_list(case_setup):
variables_response = json_facade.get_variables_response(json_hit.frame_id)
variables_references = json_facade.pop_variables_reference(variables_response.body.variables)
if IS_PY2:
expected_set = "set(['a'])"
else:
expected_set = "{'a'}"
expected_set = "{'a'}"
assert variables_response.body.variables == [
{'type': 'list', 'evaluateName': 'variable_for_test_1', 'name': 'variable_for_test_1', 'value': "['a', 'b']"},
{'type': 'set', 'evaluateName': 'variable_for_test_2', 'name': 'variable_for_test_2', 'value': expected_set}
@ -2225,45 +2190,17 @@ def test_evaluate_unicode(case_setup):
evaluate_response_body = evaluate_response.body.to_dict()
if IS_PY2:
# The error can be referenced.
variables_reference = json_facade.pop_variables_reference([evaluate_response_body])
assert evaluate_response_body == {
'result': u"SyntaxError('invalid syntax', ('<string>', 1, 1, '\\xe1\\x9a\\xa0'))",
'type': u'SyntaxError',
'presentationHint': {},
}
assert len(variables_reference) == 1
reference = variables_reference[0]
assert reference > 0
variables_response = json_facade.get_variables_response(reference)
child_variables = variables_response.to_dict()['body']['variables']
assert len(child_variables) == 2
for c in child_variables:
if c[u'type'] == u'SyntaxError':
assert c.pop('variablesReference') > 0
assert c == {
u'type': u'SyntaxError',
u'evaluateName': u'\u16a0.result',
u'name': u'result',
u'value': u"SyntaxError('invalid syntax', ('<string>', 1, 1, '\\xe1\\x9a\\xa0'))"
}
else:
assert evaluate_response_body == {
'result': "'\u16a1'",
'type': 'str',
'variablesReference': 0,
'presentationHint': {'attributes': ['rawString']},
}
assert evaluate_response_body == {
'result': "'\u16a1'",
'type': 'str',
'variablesReference': 0,
'presentationHint': {'attributes': ['rawString']},
}
json_facade.write_continue()
writer.finished_ok = True
@pytest.mark.skipif(IS_PY26, reason='Not ok on Python 2.6.')
def test_evaluate_exec_unicode(case_setup):
def get_environ(writer):
@ -2402,7 +2339,7 @@ def test_evaluate_variable_references(case_setup):
assert evaluate_response_body == {
'type': 'set',
'result': "set(['a'])" if IS_PY2 else "{'a'}",
'result': "{'a'}",
'presentationHint': {},
}
assert len(variables_reference) == 1
@ -3804,7 +3741,6 @@ cherrypy.quickstart(HelloWorld())
writer.finished_ok = True
@pytest.mark.skipif(IS_PY26, reason='Flaky on Python 2.6.')
def test_wait_for_attach(case_setup_remote_attach_to):
host_port = get_socket_name(close=True)
@ -4039,9 +3975,6 @@ def test_path_translation_and_source_reference(case_setup):
translated_dir_not_ascii = u'áéíóú汉字'
if IS_PY2:
translated_dir_not_ascii = translated_dir_not_ascii.encode(file_system_encoding)
def get_file_in_client(writer):
# Instead of using: test_python/_debugger_case_path_translation.py
# we'll set the breakpoints at foo/_debugger_case_path_translation.py
@ -4064,8 +3997,6 @@ def test_path_translation_and_source_reference(case_setup):
bp_line = writer.get_line_index_with_content('break here')
assert writer.TEST_FILE.endswith('_debugger_case_path_translation.py')
local_root = os.path.dirname(get_file_in_client(writer))
if IS_PY2:
local_root = local_root.decode(file_system_encoding).encode('utf-8')
json_facade.write_launch(pathMappings=[{
'localRoot': local_root,
'remoteRoot': os.path.dirname(writer.TEST_FILE),
@ -4092,11 +4023,6 @@ def test_path_translation_and_source_reference(case_setup):
path = stack_frame['source']['path']
file_in_client_unicode = file_in_client
if IS_PY2:
if isinstance(path, bytes):
path = path.decode('utf-8')
if isinstance(file_in_client_unicode, bytes):
file_in_client_unicode = file_in_client_unicode.decode(file_system_encoding)
assert path == file_in_client_unicode
source_reference = stack_frame['source']['sourceReference']
@ -4569,11 +4495,6 @@ def test_redirect_output(case_setup):
output_event = json_facade.wait_for_json_message(OutputEvent)
output = output_event.body.output
category = output_event.body.category
if IS_PY2:
if isinstance(output, unicode): # noqa -- unicode not available in py3
output = output.encode('utf-8')
if isinstance(category, unicode): # noqa -- unicode not available in py3
category = category.encode('utf-8')
msg = (output, category)
except Exception:
for msg in msgs:
@ -4761,7 +4682,6 @@ def test_subprocess_pydevd_customization(case_setup_remote, command_line_args):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY26, reason='Only Python 2.7 onwards.')
def test_subprocess_then_fork(case_setup_multiprocessing):
import threading
from tests_python.debugger_unittest import AbstractWriterThread
@ -5445,7 +5365,6 @@ def test_variable_presentation(case_setup, var_presentation, check_func):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY26, reason='Only Python 2.7 onwards.')
def test_debugger_case_deadlock_thread_eval(case_setup):
def get_environ(self):
@ -5469,7 +5388,6 @@ def test_debugger_case_deadlock_thread_eval(case_setup):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY26, reason='Only Python 2.7 onwards.')
def test_debugger_case_breakpoint_on_unblock_thread_eval(case_setup):
from _pydevd_bundle._debug_adapter.pydevd_schema import EvaluateResponse
@ -5509,7 +5427,6 @@ def test_debugger_case_breakpoint_on_unblock_thread_eval(case_setup):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY26, reason='Only Python 2.7 onwards.')
def test_debugger_case_unblock_manually(case_setup):
from _pydevd_bundle._debug_adapter.pydevd_schema import EvaluateResponse
@ -5545,7 +5462,6 @@ def test_debugger_case_unblock_manually(case_setup):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY26, reason='Only Python 2.7 onwards.')
def test_debugger_case_deadlock_notify_evaluate_timeout(case_setup, pyfile):
@pyfile
@ -5582,7 +5498,6 @@ def test_debugger_case_deadlock_notify_evaluate_timeout(case_setup, pyfile):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY26, reason='Only Python 2.7 onwards.')
def test_debugger_case_deadlock_interrupt_thread(case_setup, pyfile):
@pyfile
@ -5792,7 +5707,6 @@ def do_something():
writer.finished_ok = True
@pytest.mark.skipif(IS_PY2, reason='Python 3 onwards required.')
def test_step_into_target_basic(case_setup):
with case_setup.test_file('_debugger_case_smart_step_into.py') as writer:
json_facade = JsonFacade(writer)
@ -5817,7 +5731,6 @@ def test_step_into_target_basic(case_setup):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY2, reason='Python 3 onwards required.')
def test_step_into_target_multiple(case_setup):
with case_setup.test_file('_debugger_case_smart_step_into2.py') as writer:
json_facade = JsonFacade(writer)
@ -5842,7 +5755,6 @@ def test_step_into_target_multiple(case_setup):
writer.finished_ok = True
@pytest.mark.skipif(IS_PY2, reason='Python 3 onwards required.')
def test_step_into_target_genexpr(case_setup):
with case_setup.test_file('_debugger_case_smart_step_into3.py') as writer:
json_facade = JsonFacade(writer)

View file

@ -1,9 +1,6 @@
def test_dump_threads():
import pydevd
try:
from StringIO import StringIO
except:
from io import StringIO
from io import StringIO
stream = StringIO()
pydevd.dump_threads(stream=stream)
contents = stream.getvalue()

View file

@ -2,18 +2,13 @@
from __future__ import unicode_literals
from _pydev_bundle._pydev_completer import (isidentifier, extract_token_and_qualifier,
TokenAndQualifier)
from _pydevd_bundle.pydevd_constants import IS_PY2
def test_isidentifier():
assert isidentifier('abc')
assert not isidentifier('<')
assert not isidentifier('')
if IS_PY2:
# Py3 accepts unicode identifiers
assert not isidentifier('áéíóú')
else:
assert isidentifier('áéíóú')
assert isidentifier('áéíóú')
def test_extract_token_and_qualifier():

View file

@ -1,7 +1,6 @@
import json
from tests_python.debugger_unittest import ReaderThread, IS_JYTHON
from tests_python.debug_constants import IS_PY3K
import pytest
import socket
from _pydev_bundle import pydev_localhost
@ -29,7 +28,7 @@ class _DummySocket(object):
return self._sock_for_reader_thread.recv(*args, **kwargs)
def put(self, msg):
if IS_PY3K and not isinstance(msg, bytes):
if not isinstance(msg, bytes):
msg = msg.encode('utf-8')
if self._sock_for_fixture_test is None:

View file

@ -7,7 +7,6 @@ import pytest
from _pydev_bundle.pydev_monkey import pydev_src_dir
from _pydevd_bundle.pydevd_constants import sorted_dict_repr
from pydevd import SetupHolder
from tests_python.debug_constants import IS_PY2
try:
from _pydev_bundle import pydev_monkey
@ -477,13 +476,8 @@ def test_monkey_patch_c_program_arg(use_bytes):
encode = lambda s:s
if use_bytes:
if not IS_PY2:
check = [c.encode('utf-8') for c in check]
encode = lambda s:s.encode('utf-8')
else:
if IS_PY2:
check = [c.decode('utf-8') for c in check]
encode = lambda s:s.decode('utf-8')
check = [c.encode('utf-8') for c in check]
encode = lambda s:s.encode('utf-8')
assert pydev_monkey.patch_args(check) == [
encode('C:\\bin\\python.exe'),

View file

@ -1,4 +1,3 @@
from tests_python.debug_constants import IS_PY2
from _pydevd_bundle.pydevd_constants import IS_PY36_OR_GREATER, GENERATED_LEN_ATTR_NAME
@ -19,9 +18,6 @@ def test_dict_resolver():
assert contents_debug_adapter_protocol == [
('(1, 2)', 2, '[(1, 2)]'), ("'22'", 22, "['22']")]
elif IS_PY2:
assert contents_debug_adapter_protocol == [
('(1, 2)', 2, '[(1, 2)]'), (u"u'22'", 22, u"[u'22']")]
else:
assert contents_debug_adapter_protocol == [
("'22'", 22, "['22']"), ('(1, 2)', 2, '[(1, 2)]')]
@ -117,22 +113,11 @@ def test_object_resolver__dict__non_strings():
obj = MyObject()
dictionary = clear_contents_dictionary(default_resolver.get_dictionary(obj))
if IS_PY2:
assert 'attribute name must be string' in dictionary.pop('(1, 2)')
assert dictionary == {}
else:
assert dictionary == {'(1, 2)': (3, 4)}
assert dictionary == {'(1, 2)': (3, 4)}
contents_debug_adapter_protocol = clear_contents_debug_adapter_protocol(
default_resolver.get_contents_debug_adapter_protocol(obj))
if IS_PY2:
assert len(contents_debug_adapter_protocol) == 1
entry = contents_debug_adapter_protocol[0]
assert entry[0] == '(1, 2)'
assert 'attribute name must be string' in entry[1]
assert entry[2] == '.(1, 2)'
else:
assert contents_debug_adapter_protocol == [('(1, 2)', (3, 4), '.__dict__[(1, 2)]')]
assert contents_debug_adapter_protocol == [('(1, 2)', (3, 4), '.__dict__[(1, 2)]')]
def test_django_forms_resolver():
@ -148,19 +133,11 @@ def test_django_forms_resolver():
obj = MyObject()
dictionary = clear_contents_dictionary(django_form_resolver.get_dictionary(obj))
if IS_PY2:
assert 'attribute name must be string' in dictionary.pop('(1, 2)')
assert dictionary == {'errors': None}
else:
assert dictionary == {'(1, 2)': (3, 4), 'errors': None}
assert dictionary == {'(1, 2)': (3, 4), 'errors': None}
obj._errors = 'bar'
dictionary = clear_contents_dictionary(django_form_resolver.get_dictionary(obj))
if IS_PY2:
assert 'attribute name must be string' in dictionary.pop('(1, 2)')
assert dictionary == {'errors': 'bar', '_errors': 'bar'}
else:
assert dictionary == {'(1, 2)': (3, 4), 'errors': 'bar', '_errors': 'bar'}
assert dictionary == {'(1, 2)': (3, 4), 'errors': 'bar', '_errors': 'bar'}
def clear_contents_debug_adapter_protocol(contents_debug_adapter_protocol):

View file

@ -29,12 +29,8 @@ def test_run(testdir_or_pytester):
import sys
import os
if debugger_unittest.IS_PY3K:
foo_dir = debugger_unittest._get_debugger_test_file(os.path.join('resources', 'launch', 'foo'))
foo_module = 'tests_python.resources.launch.foo'
else:
foo_dir = debugger_unittest._get_debugger_test_file(os.path.join('resources', 'launch_py2', 'foo'))
foo_module = 'tests_python.resources.launch_py2.foo'
foo_dir = debugger_unittest._get_debugger_test_file(os.path.join('resources', 'launch', 'foo'))
foo_module = 'tests_python.resources.launch.foo'
pydevd_dir = os.path.dirname(os.path.dirname(__file__))
assert os.path.exists(os.path.join(pydevd_dir, 'pydevd.py'))

View file

@ -5,7 +5,7 @@ import re
import pytest
from _pydevd_bundle.pydevd_safe_repr import SafeRepr
import json
from _pydevd_bundle.pydevd_constants import IS_JYTHON, IS_PY2, IS_PY36_OR_GREATER
from _pydevd_bundle.pydevd_constants import IS_JYTHON, IS_PY36_OR_GREATER
try:
import numpy as np
@ -604,98 +604,6 @@ class TestNumpy(SafeReprTestBase):
self.assert_unchanged(value, repr(value))
@pytest.mark.parametrize('params', [
# In python 2, unicode slicing may or may not work well depending on whether it's a ucs-2 or
# ucs-4 build (so, we have to strip the high-surrogate if it's ucs-2 and the number of chars
# will be different).
{'maxother_outer': 20, 'input': u"😄😄😄😄😄😄😄😄😄😄😄😄😄😄😄😄😄😄F😄FF😄F", 'output': (u"😄😄😄😄😄😄...FF😄F", u"😄😄😄😄😄😄😄😄😄😄😄😄😄...F😄FF😄F"), 'output_str': ("u'\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\ud83d...\\ude04FF\\U0001f604F'", "u'\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604...F\\U0001f604FF\\U0001f604F'")},
{'maxother_outer': 20, 'input': u"😄😄😄😄😄😄😄😄😄😄😄😄😄😄😄😄😄😄FFFFFFFF", 'output': (u"😄😄😄😄😄😄...FFFFFF", u"😄😄😄😄😄😄😄😄😄😄😄😄😄...FFFFFF"), 'output_str': ("u'\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\ud83d...FFFFFF'", "u'\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604...FFFFFF'")},
{'maxother_outer': 20, 'input': u"🌐🌐🌐🌐🌐🌐🌐🌐🌐🌐🌐🌐🌐🌐🌐🌐🌐🌐FFFFFFFF", 'output': (u"🌐🌐🌐🌐🌐🌐...FFFFFF", u"🌐🌐🌐🌐🌐🌐🌐🌐🌐🌐🌐🌐🌐...FFFFFF"), 'output_str': ("u'\\U0001f310\\U0001f310\\U0001f310\\U0001f310\\U0001f310\\U0001f310\\ud83c...FFFFFF'", "u'\\U0001f310\\U0001f310\\U0001f310\\U0001f310\\U0001f310\\U0001f310\\U0001f310\\U0001f310\\U0001f310\\U0001f310\\U0001f310\\U0001f310\\U0001f310...FFFFFF'")},
{'maxother_outer': 10, 'input': u"😄😄😄😄😄😄😄😄😄FFFFFFFF", 'output': (u"😄😄😄...FFF", u"😄😄😄😄😄😄...FFF"), 'output_str': ("u'\\U0001f604\\U0001f604\\U0001f604...FFF'", "u'\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604\\U0001f604...FFF'")},
{'maxother_outer': 10, 'input': u"🌐🌐🌐🌐🌐🌐🌐🌐🌐FFFFFFFF", 'output': (u"🌐🌐🌐...FFF", u"🌐🌐🌐🌐🌐🌐...FFF"), 'output_str': ("u'\\U0001f310\\U0001f310\\U0001f310...FFF'", "u'\\U0001f310\\U0001f310\\U0001f310\\U0001f310\\U0001f310\\U0001f310...FFF'")},
# Regular unicode
{'maxother_outer': 20, 'input': u"ωωωωωωωωωωωωωωωωωωωωωωωFFFFFFFF", 'output': u"ωωωωωωωωωωωωω...FFFFFF", 'output_str': repr(u"ωωωωωωωωωωωωω...FFFFFF")},
{'maxother_outer': 10, 'input': u"<EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>FFFFFFFF", 'output': u"<EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>...FFF", 'output_str': repr(u"<EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>...FFF")},
# Note: as unicode directly doesn't reach the limit and is not elided.
{'maxother_outer': 20, 'input': u"<EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>FFFFFFFF", 'output': u"<EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>F...FFFFFF", 'output_str': repr(u"<EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>FFFFFFFF")},
# Note that we actually get the repr() in this case as we can't decode it with any of the available encodings.
{'maxother_outer': 10, 'input': b'\xed\xbd\xbf\xff\xfe\xfa\xfd' * 10, 'output': b"'\\xed\\...fd'", 'output_str': "'\\xed\\xbd\\xbf\\xff\\xfe\\xfa...\\xfe\\xfa\\xfd'"},
{'maxother_outer': 20, 'input': b'\xed\xbd\xbf\xff\xfe\xfa\xfd' * 10, 'output': b"'\\xed\\xbd\\xbf...a\\xfd'", 'output_str': "'\\xed\\xbd\\xbf\\xff\\xfe\\xfa\\xfd\\xed\\xbd\\xbf\\xff\\xfe\\xfa...\\xbd\\xbf\\xff\\xfe\\xfa\\xfd'"},
# Check that we use repr() even if it fits the maxother_outer limit.
{'maxother_outer': 100, 'input': b'\xed\xbd\xbf\xff\xfe\xfa\xfd', 'output': "'\\xed\\xbd\\xbf\\xff\\xfe\\xfa\\xfd'", 'output_str': repr(b'\xed\xbd\xbf\xff\xfe\xfa\xfd')},
# Note that with latin1 encoding we can actually decode the string but when encoding back to utf-8 we have garbage
# (couldn't find a good approach to know what to do here as we've actually been able to decode it as
# latin-1 because it's a very permissive encoding).
{
'maxother_outer': 10,
'sys_stdout_encoding': 'latin1',
'input': b'\xed\xbd\xbf\xff\xfe\xfa\xfd' * 10,
'output': b'\xc3\xad\xc2\xbd\xc2\xbf\xc3\xbf\xc3\xbe\xc3\xba...\xc3\xbe\xc3\xba\xc3\xbd',
'output_str': "\'\\xed\\xbd\\xbf\\xff\\xfe\\xfa...\\xfe\\xfa\\xfd\'",
},
])
@pytest.mark.parametrize('use_str', [True, False])
@pytest.mark.skipif(not IS_PY2, reason='Py2 specific test.')
def test_py2_bytes_slicing(params, use_str):
safe_repr = SafeRepr()
safe_repr.locale_preferred_encoding = 'ascii'
safe_repr.sys_stdout_encoding = params.get('sys_stdout_encoding', 'ascii')
safe_repr.maxother_outer = params['maxother_outer']
# This is the encoding that we expect back (because json needs to be able to encode it
# later on, so, the return from SafeRepr must always be utf-8 regardless of the input).
encoding = 'utf-8'
if not use_str:
class MyObj(object):
def __repr__(self):
ret = params['input']
if isinstance(ret, unicode):
ret = ret.encode(encoding)
return ret
safe_repr_input = MyObj()
else:
safe_repr_input = params['input']
computed = safe_repr(safe_repr_input)
if use_str:
expected_output = params['output_str']
else:
expected_output = params['output']
expect_unicode = False
if isinstance(expected_output, unicode):
expect_unicode = True
if isinstance(expected_output, tuple) and isinstance(expected_output[0], unicode):
expect_unicode = True
if expect_unicode:
computed = computed.decode(encoding)
if isinstance(expected_output, tuple):
assert computed in expected_output
else:
assert computed == expected_output
else:
if isinstance(expected_output, tuple):
assert computed in expected_output
else:
assert computed == expected_output
# Check that we can json-encode the return.
assert json.dumps(computed)
@pytest.mark.parametrize('params', [
{'maxother_outer': 20, 'input': "😄😄😄😄😄😄😄😄😄😄😄😄😄😄😄😄😄😄FFFFFFFF", 'output': '😄😄😄😄😄😄😄😄😄😄😄😄😄...FFFFFF'},
{'maxother_outer': 10, 'input': "😄😄😄😄😄😄😄😄😄😄😄😄😄😄😄😄😄😄FFFFFFFF", 'output': '😄😄😄😄😄😄...FFF'},
@ -703,7 +611,6 @@ def test_py2_bytes_slicing(params, use_str):
# Because we can't return bytes, byte-related tests aren't needed (and str works as it should).
])
@pytest.mark.skipif(IS_PY2, reason='Py3 specific test')
@pytest.mark.parametrize('use_str', [True, False])
def test_py3_str_slicing(params, use_str):
# Note: much simpler in python because __repr__ is required to return str
@ -741,10 +648,7 @@ def test_raw_bytes():
obj = b'\xed\xbd\xbf\xff\xfe\xfa\xfd'
raw_value_repr = safe_repr(obj)
assert isinstance(raw_value_repr, str) # bytes on py2, str on py3
if IS_PY2:
assert raw_value_repr == obj.decode('latin1').encode('utf-8')
else:
assert raw_value_repr == obj.decode('latin1')
assert raw_value_repr == obj.decode('latin1')
def test_raw_unicode():
@ -753,10 +657,7 @@ def test_raw_unicode():
obj = u'\xed\xbd\xbf\xff\xfe\xfa\xfd'
raw_value_repr = safe_repr(obj)
assert isinstance(raw_value_repr, str) # bytes on py2, str on py3
if IS_PY2:
assert raw_value_repr == obj.encode('utf-8')
else:
assert raw_value_repr == obj
assert raw_value_repr == obj
def test_no_repr():

View file

@ -1,6 +1,5 @@
import pytest
import sys
from tests_python.test_debugger import IS_PY26, IS_PY34
from _pydevd_bundle.pydevd_constants import NO_FTRACE
from tests_python.debugger_unittest import IS_JYTHON

View file

@ -1,12 +1,11 @@
import threading
from _pydevd_bundle.pydevd_utils import convert_dap_log_message_to_expression
from tests_python.debug_constants import IS_PY26, IS_PY3K, TEST_GEVENT, IS_CPYTHON
from tests_python.debug_constants import TEST_GEVENT, IS_CPYTHON
import sys
from _pydevd_bundle.pydevd_constants import IS_WINDOWS, IS_PY2, IS_PYPY, IS_JYTHON
from _pydevd_bundle.pydevd_constants import IS_WINDOWS, IS_PYPY, IS_JYTHON
import pytest
import os
import codecs
from _pydevd_bundle.pydevd_thread_lifecycle import pydevd_find_thread_by_id
@ -19,11 +18,8 @@ def test_expression_to_evaluate():
assert _expression_to_evaluate(b' for a in b:\nfoo') == b' for a in b:\nfoo'
assert _expression_to_evaluate(b'\tfor a in b:\n\t\tfoo') == b'for a in b:\n\tfoo'
if IS_PY2:
assert _expression_to_evaluate(u' expr') == (codecs.BOM_UTF8 + b'expr')
else:
assert _expression_to_evaluate(u' expr') == u'expr'
assert _expression_to_evaluate(u' for a in expr:\n pass') == u'for a in expr:\npass'
assert _expression_to_evaluate(u' expr') == u'expr'
assert _expression_to_evaluate(u' for a in expr:\n pass') == u'for a in expr:\npass'
@pytest.mark.skipif(IS_WINDOWS, reason='Brittle on Windows.')
@ -88,10 +84,7 @@ conftest.py:67: AssertionError
error_msg += 'Current main thread not instance of: %s (%s)' % (
threading._MainThread, current_thread.__class__.__mro__,)
try:
from StringIO import StringIO
except:
from io import StringIO
from io import StringIO
stream = StringIO()
dump_threads(stream=stream)
@ -162,13 +155,11 @@ def test_convert_dap_log_message_to_expression():
'a (22, 33)} 2'
)
if not IS_PY26:
# Note: set literal not valid for Python 2.6.
assert check_dap_log_message(
'a {{1: {1}}}',
"'a %s' % ({1: {1}},)",
'a {1: {1}}' if IS_PY3K else 'a {1: set([1])}',
)
assert check_dap_log_message(
'a {{1: {1}}}',
"'a %s' % ({1: {1}},)",
'a {1: {1}}'
)
# Error condition.
assert check_dap_log_message(
@ -180,10 +171,7 @@ def test_convert_dap_log_message_to_expression():
def test_pydevd_log():
from _pydev_bundle import pydev_log
try:
import StringIO as io
except:
import io
import io
from _pydev_bundle.pydev_log import log_context
stream = io.StringIO()
@ -242,10 +230,7 @@ def test_pydevd_logging_files(tmpdir):
import os.path
from _pydev_bundle.pydev_log import _LoggingGlobals
try:
import StringIO as io
except:
import io
import io
from _pydev_bundle.pydev_log import log_context
stream = io.StringIO()
@ -427,13 +412,10 @@ def test_find_main_thread_id():
def test_get_ppid():
from _pydevd_bundle.pydevd_api import PyDevdAPI
api = PyDevdAPI()
if IS_PY3K:
# On python 3 we can check that our internal api which is used for Python 2 gives the
# same result as os.getppid.
ppid = os.getppid()
assert api._get_windows_ppid() == ppid
else:
assert api._get_windows_ppid() is not None
# On python 3 we can check that our internal api which is used for Python 2 gives the
# same result as os.getppid.
ppid = os.getppid()
assert api._get_windows_ppid() == ppid
def _check_gevent(expect_msg):

View file

@ -1,18 +1,15 @@
import os.path
import sys
IS_PY26 = sys.version_info[:2] == (2, 6)
IS_JYTHON = sys.platform.find('java') != -1
try:
this_file_name = __file__
except NameError:
# stupid jython. plain old __file__ isnt working for some reason
import test_runfiles #@UnresolvedImport - importing the module itself
import test_runfiles # @UnresolvedImport - importing the module itself
this_file_name = test_runfiles.__file__
desired_runfiles_path = os.path.normpath(os.path.dirname(this_file_name) + "/..")
sys.path.insert(0, desired_runfiles_path)
@ -20,13 +17,12 @@ from _pydev_runfiles import pydev_runfiles_unittest
from _pydev_runfiles import pydev_runfiles_xml_rpc
from _pydevd_bundle import pydevd_io
#remove existing pydev_runfiles from modules (if any), so that we can be sure we have the correct version
# remove existing pydev_runfiles from modules (if any), so that we can be sure we have the correct version
if 'pydev_runfiles' in sys.modules:
del sys.modules['pydev_runfiles']
if '_pydev_runfiles.pydev_runfiles' in sys.modules:
del sys.modules['_pydev_runfiles.pydev_runfiles']
from _pydev_runfiles import pydev_runfiles
import unittest
import tempfile
@ -37,7 +33,7 @@ try:
except:
from sets import Set as set
#this is an early test because it requires the sys.path changed
# this is an early test because it requires the sys.path changed
orig_syspath = sys.path
a_file = pydev_runfiles.__file__
pydev_runfiles.PydevTestRunner(pydev_runfiles.Configuration(files_or_dirs=[a_file]))
@ -45,9 +41,10 @@ file_dir = os.path.dirname(os.path.dirname(a_file))
assert file_dir in sys.path
sys.path = orig_syspath[:]
#remove it so that we leave it ok for other tests
# remove it so that we leave it ok for other tests
sys.path.remove(desired_runfiles_path)
class RunfilesTest(unittest.TestCase):
def _setup_scenario(
@ -81,7 +78,6 @@ class RunfilesTest(unittest.TestCase):
self.file_dir = [os.path.abspath(os.path.join(desired_runfiles_path, 'tests_runfiles/samples'))]
self._setup_scenario(self.file_dir, None)
def test_suite_used(self):
for suite in self.all_tests + self.filtered_tests:
self.assertTrue(isinstance(suite, pydev_runfiles_unittest.PydevTestSuite))
@ -134,7 +130,6 @@ class RunfilesTest(unittest.TestCase):
configuration = pydev_runfiles.parse_cmdline()
self.assertEqual(['*__todo', 'test*bar'], configuration.exclude_tests)
def test___adjust_python_path_works_for_directories(self):
orig_syspath = sys.path
tempdir = tempfile.gettempdir()
@ -142,7 +137,6 @@ class RunfilesTest(unittest.TestCase):
self.assertEqual(1, tempdir in sys.path)
sys.path = orig_syspath[:]
def test___is_valid_py_file(self):
isvalid = self.MyTestRunner._PydevTestRunner__is_valid_py_file
self.assertEqual(1, isvalid("test.py"))
@ -201,18 +195,13 @@ class RunfilesTest(unittest.TestCase):
for t in tests:
total += t.countTestCases()
return total
def test_runfile_imports(self):
from _pydev_runfiles import pydev_runfiles_coverage
from _pydev_runfiles import pydev_runfiles_parallel_client
from _pydev_runfiles import pydev_runfiles_parallel
import pytest
if IS_PY26:
with pytest.raises(AssertionError) as e:
from _pydev_runfiles import pydev_runfiles_pytest2
assert 'Please upgrade pytest' in str(e)
else:
from _pydev_runfiles import pydev_runfiles_pytest2
from _pydev_runfiles import pydev_runfiles_pytest2
from _pydev_runfiles import pydev_runfiles_unittest
from _pydev_runfiles import pydev_runfiles_xml_rpc
from _pydev_runfiles import pydev_runfiles
@ -277,7 +266,6 @@ class RunfilesTest(unittest.TestCase):
filtered_tests = self.MyTestRunner.filter_tests(self.all_tests)
self.assertEqual(self.count_suite(filtered_tests), 0)
self._setup_scenario(self.file_dir, None, exclude_tests=['*a*'])
filtered_tests = self.MyTestRunner.filter_tests(self.all_tests)
self.assertEqual(self.count_suite(filtered_tests), 6)
@ -320,31 +308,30 @@ class RunfilesTest(unittest.TestCase):
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'samples'))
notifications = []
class Server:
def __init__(self, notifications):
self.notifications = notifications
def notifyConnected(self):
#This method is called at the very start (in runfiles.py), and we do not check this here
# This method is called at the very start (in runfiles.py), and we do not check this here
raise AssertionError('Should not be called from the run tests.')
def notifyTestsCollected(self, number_of_tests):
self.notifications.append(('notifyTestsCollected', number_of_tests))
def notifyStartTest(self, file, test):
pass
def notifyTest(self, cond, captured_output, error_contents, file, test, time):
try:
#I.e.: when marked as Binary in xml-rpc
# I.e.: when marked as Binary in xml-rpc
captured_output = captured_output.data
except:
pass
try:
#I.e.: when marked as Binary in xml-rpc
# I.e.: when marked as Binary in xml-rpc
error_contents = error_contents.data
except:
pass
@ -380,13 +367,13 @@ class RunfilesTest(unittest.TestCase):
if sys.version_info[:2] <= (2, 6):
# The setUpClass is not supported in Python 2.6 (thus we have no collection error).
expected = [
('notifyTest', 'fail', '', 'AssertionError: Fail test 2', simple_test, 'SampleTest.test_xxxxxx1'),
('notifyTest', 'ok', '', '', simple_test2, 'YetAnotherSampleTest.test_abc'),
('notifyTest', 'ok', '', '', simpleClass_test, 'SetUpClassTest.test_blank'),
('notifyTest', 'ok', '', '', simpleModule_test, 'SetUpModuleTest.test_blank'),
('notifyTest', 'ok', '', '', simple_test, 'SampleTest.test_xxxxxx2'),
('notifyTest', 'ok', 'non unique name ran', '', simple_test, 'SampleTest.test_non_unique_name'),
('notifyTestRunFinished',),
('notifyTest', 'fail', '', 'AssertionError: Fail test 2', simple_test, 'SampleTest.test_xxxxxx1'),
('notifyTest', 'ok', '', '', simple_test2, 'YetAnotherSampleTest.test_abc'),
('notifyTest', 'ok', '', '', simpleClass_test, 'SetUpClassTest.test_blank'),
('notifyTest', 'ok', '', '', simpleModule_test, 'SetUpModuleTest.test_blank'),
('notifyTest', 'ok', '', '', simple_test, 'SampleTest.test_xxxxxx2'),
('notifyTest', 'ok', 'non unique name ran', '', simple_test, 'SampleTest.test_non_unique_name'),
('notifyTestRunFinished',),
('notifyTestsCollected', 6)
]
else:
@ -397,7 +384,7 @@ class RunfilesTest(unittest.TestCase):
('notifyTest', 'ok', '', '', simple_test, 'SampleTest.test_xxxxxx2'),
('notifyTest', 'ok', '', '', simple_test2, 'YetAnotherSampleTest.test_abc'),
]
if not IS_JYTHON:
if 'samples.simpleClass_test' in str(notifications):
expected.append(('notifyTest', 'error', '', 'ValueError: This is an INTENTIONAL value error in setUpClass.',
@ -414,7 +401,7 @@ class RunfilesTest(unittest.TestCase):
expected.append(('notifyTest', 'ok', '', '', simpleModule_test, 'SetUpModuleTest.test_blank'))
expected.append(('notifyTestRunFinished',))
expected.sort()
new_notifications = []
for notification in expected:

View file

@ -1,320 +0,0 @@
import Cython
from Cython.Compiler import Nodes
from Cython.Compiler.Errors import CompileError
import sys
import json
import traceback
import os
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Note: Cython has some recursive structures in some classes, so, parsing only what we really
# expect may be a bit better (although our recursion check should get that too).
accepted_info = {
'PyClassDef': set(['name', 'doc', 'body', 'bases', 'decorators', 'pos'])
}
def node_to_dict(node, _recurse_level=0, memo=None):
nodeid = id(node) # just to be sure it's checked by identity in the memo
if memo is None:
memo = {}
else:
if nodeid in memo:
# i.e.: prevent Nodes recursion.
return None
memo[nodeid] = 1
try:
_recurse_level += 1
assert _recurse_level < 500, "It seems we are recursing..."
node_name = node.__class__.__name__
# print((' ' * _recurse_level) + node_name)
if node_name.endswith("Node"):
node_name = node_name[:-4]
data = {"__node__": node_name}
if _recurse_level == 1:
data['__version__'] = Cython.__version__
dct = node.__dict__
accepted = accepted_info.get(node_name)
if accepted is None:
items = [(key, value) for key, value in dct.items()]
else:
# for key in dct.keys():
# if key not in accepted:
# print('Skipped: %s' % (key,))
items = [(key, dct[key]) for key in accepted]
for attr_name, attr in items:
if attr_name in ("pos", "position"):
data["line"] = attr[1]
data["col"] = attr[2]
continue
if isinstance(attr, Nodes.Node):
data[attr_name] = node_to_dict(attr, _recurse_level, memo)
elif isinstance(attr, (list, tuple)):
lst = []
for x in attr:
if isinstance(x, Nodes.Node):
lst.append(node_to_dict(x, _recurse_level, memo))
elif isinstance(x, (bytes, str)):
lst.append(x)
elif hasattr(x, 'encode'):
lst.append(x.encode('utf-8', 'replace'))
elif isinstance(x, (list, tuple)):
tup = []
for y in x:
if isinstance(y, (str, bytes)):
tup.append(y)
elif isinstance(y, Nodes.Node):
tup.append(node_to_dict(y, _recurse_level, memo))
lst.append(tup)
data[attr_name] = lst
else:
data[attr_name] = str(attr)
finally:
memo.pop(nodeid, None)
return data
def source_to_dict(source, name=None):
from Cython.Compiler.TreeFragment import parse_from_strings, StatListNode
# Right now we don't collect errors, but leave the API compatible already.
collected_errors = []
try:
# Note: we don't use TreeFragment because it formats the code removing empty lines
# (which ends up creating an AST with wrong lines).
if not name:
name = "(tree fragment)"
mod = t = parse_from_strings(name, source)
t = t.body # Make sure a StatListNode is at the top
if not isinstance(t, StatListNode):
t = StatListNode(pos=mod.pos, stats=[t])
root = t
except CompileError as e:
return {
'ast': None,
'errors': [node_to_dict(e)]
}
except BaseException as e:
as_dict = {
'ast': None,
'errors': [{
'__node__': 'CompileError', 'line': 1, 'col': 1, 'message_only': str(e)
}]
}
return as_dict
result = {'ast': node_to_dict(root), 'errors': [node_to_dict(e) for e in collected_errors]}
return result
from _pydev_bundle import pydev_localhost
HOST = pydev_localhost.get_localhost() # Symbolic name meaning the local host
IS_PYTHON_3_ONWARDS = sys.version_info[0] >= 3
def dbg(s):
sys.stderr.write('%s\n' % (s,))
# f = open('c:/temp/test.txt', 'a')
# print_ >> f, s
# f.close()
SERVER_NAME = 'CythonJson'
class Exit(Exception):
pass
class CythonJsonServer(object):
def __init__(self, port):
self.ended = False
self._buffer = b''
self.port = port
self.socket = None # socket to send messages.
self.exit_process_on_kill = True
def emulated_sendall(self, msg):
MSGLEN = 1024 * 20
totalsent = 0
while totalsent < MSGLEN:
sent = self.socket.send(msg[totalsent:])
if sent == 0:
return
totalsent = totalsent + sent
def send(self, msg):
if not isinstance(msg, bytes):
msg = msg.encode('utf-8', 'replace')
if not hasattr(self.socket, 'sendall'):
# Older versions (jython 2.1)
self.emulated_sendall(msg)
else:
if IS_PYTHON_3_ONWARDS:
self.socket.sendall(msg)
else:
self.socket.sendall(msg)
def connect_to_server(self):
from _pydev_imps._pydev_saved_modules import socket
self.socket = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.connect((HOST, self.port))
except:
sys.stderr.write('Error on connect_to_server with parameters: host: %s port: %s\n' % (HOST, self.port))
raise
def _read(self, size):
while True:
buffer_len = len(self._buffer)
if buffer_len == size:
ret = self._buffer
self._buffer = b''
return ret
if buffer_len > size:
ret = self._buffer[:size]
self._buffer = self._buffer[size:]
return ret
try:
r = self.socket.recv(max(size - buffer_len, 1024))
except OSError:
return b''
if not r:
return b''
self._buffer += r
def _read_line(self):
while True:
i = self._buffer.find(b'\n')
if i != -1:
i += 1 # Add the newline to the return
ret = self._buffer[:i]
self._buffer = self._buffer[i:]
return ret
else:
try:
r = self.socket.recv(1024)
except OSError:
return b''
if not r:
return b''
self._buffer += r
def process_command(self, json_contents):
try:
as_dict = json.loads(json_contents)
if as_dict['command'] == 'cython_to_json_ast':
contents = as_dict['contents']
as_dict = source_to_dict(contents)
result = as_dict
else:
result = {'command': '<unexpected>', 'received': json_contents}
except:
try:
from StringIO import StringIO
except:
from io import StringIO
s = StringIO()
traceback.print_exc(file=s)
result = {'command': '<errored>', 'error': s.getvalue()}
return json.dumps(result)
def run(self):
# Echo server program
try:
dbg(SERVER_NAME + ' connecting to java server on %s (%s)' % (HOST, self.port))
# after being connected, create a socket as a client.
self.connect_to_server()
dbg(SERVER_NAME + ' Connected to java server')
content_len = -1
while True:
dbg('Will read line...')
line = self._read_line()
dbg('Read: %s' % (line,))
if not line:
raise Exit()
if line.startswith(b'Content-Length:'):
content_len = int(line.strip().split(b':', 1)[1])
dbg('Found content len: %s' % (content_len,))
continue
if content_len != -1:
# If we previously received a content length, read until a '\r\n'.
if line == b'\r\n':
dbg('Will read contents (%s)...' % (content_len,))
json_contents = self._read(content_len)
dbg('Read: %s' % (json_contents,))
content_len = -1
if len(json_contents) == 0:
raise Exit()
# We just received a json message, let's process it.
dbg('Will process...')
output = self.process_command(json_contents)
if not isinstance(output, bytes):
output = output.encode('utf-8', 'replace')
self.send('Content-Length: %s\r\n\r\n' % (len(output),))
self.send(output)
continue
except Exit:
sys.exit(0)
except:
traceback.print_exc()
raise
if __name__ == '__main__':
args = sys.argv[1:]
if args == ['-']:
# Read from stdin/dump to stdout
if sys.version_info < (3,):
stdin_get_value = sys.stdin.read
else:
stdin_get_value = sys.stdin.buffer.read
source = stdin_get_value()
# After reading, convert to unicode (use the stdout encoding)
source = source.decode(sys.stdout.encoding, 'replace')
as_dict = source_to_dict(source)
print(json.dumps(as_dict, indent=4))
sys.stdout.flush()
else:
# start as server
port = int(sys.argv[1]) # this is from where we want to receive messages.
t = CythonJsonServer(port)
dbg(SERVER_NAME + ' will start')
t.run()

View file

@ -1,28 +0,0 @@
"""__init__.py.
Defines the isort module to include the SortImports utility class as well as any defined settings.
Copyright (C) 2013 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from . import settings
from .isort import SortImports
__version__ = "4.2.15"

View file

@ -1,3 +0,0 @@
# Allow running the package as ``python -m isort``: delegate to the CLI entry point.
from isort.main import main
main()

View file

@ -1,82 +0,0 @@
"""isort.py.
Defines a git hook to allow pre-commit warnings and errors about import order.
usage:
exit_code = git_hook(strict=True)
Copyright (C) 2015 Helen Sherwood-Taylor
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import subprocess
from isort import SortImports
def get_output(command):
    """Execute *command* and hand back its raw stdout.

    :param str command: whitespace-separated command line to run
    :returns: everything the command wrote to stdout, as bytes
    """
    argv = command.split()
    # check=True mirrors check_output: raise CalledProcessError on a
    # non-zero exit status.
    completed = subprocess.run(argv, stdout=subprocess.PIPE, check=True)
    return completed.stdout
def get_lines(command):
    """Run *command* and return its stdout as a list of text lines.

    :param str command: the command to run
    :returns: list of whitespace-stripped lines output by command
    """
    raw = get_output(command)
    lines = []
    for raw_line in raw.splitlines():
        lines.append(raw_line.strip().decode('utf-8'))
    return lines
def git_hook(strict=False):
    """Git pre-commit hook to check staged files for isort errors.

    :param bool strict: if True, return the number of errors on exit,
        causing the hook to fail; if False, return zero so the hook only
        acts as a warning.
    :return: number of errors if in strict mode, 0 otherwise.
    """
    # Files that are modified AND staged for the next commit.
    changed_files = get_lines("git diff-index --cached --name-only --diff-filter=ACMRTUXB HEAD")
    error_count = 0
    for path in changed_files:
        if not path.endswith('.py'):
            continue
        # Check the staged (index) version of the file, not the worktree copy.
        staged_contents = get_output("git show :%s" % path)
        result = SortImports(
            file_path=path,
            file_contents=staged_contents.decode(),
            check=True
        )
        if result.incorrectly_sorted:
            error_count += 1
    return error_count if strict else 0

View file

@ -1,969 +0,0 @@
"""isort.py.
Exposes a simple library to sort through imports within Python code
usage:
SortImports(file_name)
or:
sorted = SortImports(file_contents=file_contents).output
Copyright (C) 2013 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import copy
import io
import itertools
import os
import re
import sys
from collections import namedtuple
from datetime import datetime
from difflib import unified_diff
from fnmatch import fnmatch
from glob import glob
from . import settings
from .natural import nsorted
from .pie_slice import OrderedDict, OrderedSet, input, itemsview
KNOWN_SECTION_MAPPING = {
'STDLIB': 'STANDARD_LIBRARY',
'FUTURE': 'FUTURE_LIBRARY',
'FIRSTPARTY': 'FIRST_PARTY',
'THIRDPARTY': 'THIRD_PARTY',
}
class SortImports(object):
incorrectly_sorted = False
skipped = False
def __init__(self, file_path=None, file_contents=None, write_to_stdout=False, check=False,
show_diff=False, settings_path=None, ask_to_apply=False, **setting_overrides):
if not settings_path and file_path:
settings_path = os.path.dirname(os.path.abspath(file_path))
settings_path = settings_path or os.getcwd()
self.config = settings.from_path(settings_path).copy()
for key, value in itemsview(setting_overrides):
access_key = key.replace('not_', '').lower()
# The sections config needs to retain order and can't be converted to a set.
if access_key != 'sections' and type(self.config.get(access_key)) in (list, tuple):
if key.startswith('not_'):
self.config[access_key] = list(set(self.config[access_key]).difference(value))
else:
self.config[access_key] = list(set(self.config[access_key]).union(value))
else:
self.config[key] = value
if self.config['force_alphabetical_sort']:
self.config.update({'force_alphabetical_sort_within_sections': True,
'no_sections': True,
'lines_between_types': 1,
'from_first': True})
indent = str(self.config['indent'])
if indent.isdigit():
indent = " " * int(indent)
else:
indent = indent.strip("'").strip('"')
if indent.lower() == "tab":
indent = "\t"
self.config['indent'] = indent
self.place_imports = {}
self.import_placements = {}
self.remove_imports = [self._format_simplified(removal) for removal in self.config['remove_imports']]
self.add_imports = [self._format_natural(addition) for addition in self.config['add_imports']]
self._section_comments = ["# " + value for key, value in itemsview(self.config) if
key.startswith('import_heading') and value]
self.file_encoding = 'utf-8'
file_name = file_path
self.file_path = file_path or ""
if file_path:
file_path = os.path.abspath(file_path)
if settings.should_skip(file_path, self.config):
self.skipped = True
if self.config['verbose']:
print("WARNING: {0} was skipped as it's listed in 'skip' setting"
" or matches a glob in 'skip_glob' setting".format(file_path))
file_contents = None
elif not file_contents:
self.file_path = file_path
self.file_encoding = coding_check(file_path)
with io.open(file_path, encoding=self.file_encoding) as file_to_import_sort:
file_contents = file_to_import_sort.read()
if file_contents is None or ("isort:" + "skip_file") in file_contents:
return
self.in_lines = file_contents.split("\n")
self.original_length = len(self.in_lines)
if (self.original_length > 1 or self.in_lines[:1] not in ([], [""])) or self.config['force_adds']:
for add_import in self.add_imports:
self.in_lines.append(add_import)
self.number_of_lines = len(self.in_lines)
self.out_lines = []
self.comments = {'from': {}, 'straight': {}, 'nested': {}, 'above': {'straight': {}, 'from': {}}}
self.imports = OrderedDict()
self.as_map = {}
section_names = self.config['sections']
self.sections = namedtuple('Sections', section_names)(*[name for name in section_names])
for section in itertools.chain(self.sections, self.config['forced_separate']):
self.imports[section] = {'straight': OrderedSet(), 'from': OrderedDict()}
self.known_patterns = []
for placement in reversed(self.sections):
known_placement = KNOWN_SECTION_MAPPING.get(placement, placement)
config_key = 'known_{0}'.format(known_placement.lower())
known_patterns = self.config.get(config_key, [])
for known_pattern in known_patterns:
self.known_patterns.append((re.compile('^' + known_pattern.replace('*', '.*').replace('?', '.?') + '$'),
placement))
self.index = 0
self.import_index = -1
self._first_comment_index_start = -1
self._first_comment_index_end = -1
self._parse()
if self.import_index != -1:
self._add_formatted_imports()
self.length_change = len(self.out_lines) - self.original_length
while self.out_lines and self.out_lines[-1].strip() == "":
self.out_lines.pop(-1)
self.out_lines.append("")
self.output = "\n".join(self.out_lines)
if self.config['atomic']:
try:
compile(self._strip_top_comments(self.out_lines), self.file_path, 'exec', 0, 1)
except SyntaxError:
self.output = file_contents
self.incorrectly_sorted = True
try:
compile(self._strip_top_comments(self.in_lines), self.file_path, 'exec', 0, 1)
print("ERROR: {0} isort would have introduced syntax errors, please report to the project!". \
format(self.file_path))
except SyntaxError:
print("ERROR: {0} File contains syntax errors.".format(self.file_path))
return
if check:
check_output = self.output
check_against = file_contents
if self.config['ignore_whitespace']:
check_output = check_output.replace("\n", "").replace(" ", "")
check_against = check_against.replace("\n", "").replace(" ", "")
if check_output == check_against:
if self.config['verbose']:
print("SUCCESS: {0} Everything Looks Good!".format(self.file_path))
return
print("ERROR: {0} Imports are incorrectly sorted.".format(self.file_path))
self.incorrectly_sorted = True
if show_diff or self.config['show_diff']:
self._show_diff(file_contents)
elif write_to_stdout:
sys.stdout.write(self.output)
elif file_name and not check:
if ask_to_apply:
if self.output == file_contents:
return
self._show_diff(file_contents)
answer = None
while answer not in ('yes', 'y', 'no', 'n', 'quit', 'q'):
answer = input("Apply suggested changes to '{0}' [y/n/q]?".format(self.file_path)).lower()
if answer in ('no', 'n'):
return
if answer in ('quit', 'q'):
sys.exit(1)
with io.open(self.file_path, encoding=self.file_encoding, mode='w') as output_file:
output_file.write(self.output)
def _show_diff(self, file_contents):
for line in unified_diff(
file_contents.splitlines(1),
self.output.splitlines(1),
fromfile=self.file_path + ':before',
tofile=self.file_path + ':after',
fromfiledate=str(datetime.fromtimestamp(os.path.getmtime(self.file_path))
if self.file_path else datetime.now()),
tofiledate=str(datetime.now())
):
sys.stdout.write(line)
@staticmethod
def _strip_top_comments(lines):
"""Strips # comments that exist at the top of the given lines"""
lines = copy.copy(lines)
while lines and lines[0].startswith("#"):
lines = lines[1:]
return "\n".join(lines)
def place_module(self, module_name):
"""Tries to determine if a module is a python std import, third party import, or project code:
if it can't determine - it assumes it is project code
"""
for forced_separate in self.config['forced_separate']:
# Ensure all forced_separate patterns will match to end of string
path_glob = forced_separate
if not forced_separate.endswith('*'):
path_glob = '%s*' % forced_separate
if fnmatch(module_name, path_glob) or fnmatch(module_name, '.' + path_glob):
return forced_separate
if module_name.startswith("."):
return self.sections.LOCALFOLDER
# Try to find most specific placement instruction match (if any)
parts = module_name.split('.')
module_names_to_check = ['.'.join(parts[:first_k]) for first_k in range(len(parts), 0, -1)]
for module_name_to_check in module_names_to_check:
for pattern, placement in self.known_patterns:
if pattern.match(module_name_to_check):
return placement
# Use a copy of sys.path to avoid any unintended modifications
# to it - e.g. `+=` used below will change paths in place and
# if not copied, consequently sys.path, which will grow unbounded
# with duplicates on every call to this method.
paths = list(sys.path)
virtual_env = self.config.get('virtual_env') or os.environ.get('VIRTUAL_ENV')
virtual_env_src = False
if virtual_env:
paths += [path for path in glob('{0}/lib/python*/site-packages'.format(virtual_env))
if path not in paths]
paths += [path for path in glob('{0}/src/*'.format(virtual_env)) if os.path.isdir(path)]
virtual_env_src = '{0}/src/'.format(virtual_env)
# handle case-insensitive paths on windows
stdlib_lib_prefix = os.path.normcase(get_stdlib_path())
for prefix in paths:
module_path = "/".join((prefix, module_name.replace(".", "/")))
package_path = "/".join((prefix, module_name.split(".")[0]))
is_module = (exists_case_sensitive(module_path + ".py") or
exists_case_sensitive(module_path + ".so"))
is_package = exists_case_sensitive(package_path) and os.path.isdir(package_path)
if is_module or is_package:
if ('site-packages' in prefix or 'dist-packages' in prefix or
(virtual_env and virtual_env_src in prefix)):
return self.sections.THIRDPARTY
elif os.path.normcase(prefix).startswith(stdlib_lib_prefix):
return self.sections.STDLIB
else:
return self.config['default_section']
return self.config['default_section']
def _get_line(self):
"""Returns the current line from the file while incrementing the index."""
line = self.in_lines[self.index]
self.index += 1
return line
@staticmethod
def _import_type(line):
"""If the current line is an import line it will return its type (from or straight)"""
if "isort:skip" in line:
return
elif line.startswith('import '):
return "straight"
elif line.startswith('from '):
return "from"
def _at_end(self):
"""returns True if we are at the end of the file."""
return self.index == self.number_of_lines
@staticmethod
def _module_key(module_name, config, sub_imports=False, ignore_case=False):
prefix = ""
if ignore_case:
module_name = str(module_name).lower()
else:
module_name = str(module_name)
if sub_imports and config['order_by_type']:
if module_name.isupper() and len(module_name) > 1:
prefix = "A"
elif module_name[0:1].isupper():
prefix = "B"
else:
prefix = "C"
module_name = module_name.lower()
return "{0}{1}{2}".format(module_name in config['force_to_top'] and "A" or "B", prefix,
config['length_sort'] and (str(len(module_name)) + ":" + module_name) or module_name)
def _add_comments(self, comments, original_string=""):
"""
Returns a string with comments added
"""
return comments and "{0} # {1}".format(self._strip_comments(original_string)[0],
"; ".join(comments)) or original_string
def _wrap(self, line):
"""
Returns an import wrapped to the specified line-length, if possible.
"""
wrap_mode = self.config['multi_line_output']
if len(line) > self.config['line_length'] and wrap_mode != settings.WrapModes.NOQA:
for splitter in ("import", ".", "as"):
exp = r"\b" + re.escape(splitter) + r"\b"
if re.search(exp, line) and not line.strip().startswith(splitter):
line_parts = re.split(exp, line)
next_line = []
while (len(line) + 2) > (self.config['wrap_length'] or self.config['line_length']) and line_parts:
next_line.append(line_parts.pop())
line = splitter.join(line_parts)
if not line:
line = next_line.pop()
cont_line = self._wrap(self.config['indent'] + splitter.join(next_line).lstrip())
if self.config['use_parentheses']:
output = "{0}{1} (\n{2}{3}{4})".format(
line, splitter, cont_line,
"," if self.config['include_trailing_comma'] else "",
"\n" if wrap_mode in (
settings.WrapModes.VERTICAL_HANGING_INDENT,
settings.WrapModes.VERTICAL_GRID_GROUPED,
) else "")
lines = output.split('\n')
if ' #' in lines[-1] and lines[-1].endswith(')'):
line, comment = lines[-1].split(' #', 1)
lines[-1] = line + ') #' + comment[:-1]
return '\n'.join(lines)
return "{0}{1} \\\n{2}".format(line, splitter, cont_line)
elif len(line) > self.config['line_length'] and wrap_mode == settings.WrapModes.NOQA:
if "# NOQA" not in line:
return "{0} # NOQA".format(line)
return line
def _add_straight_imports(self, straight_modules, section, section_output):
for module in straight_modules:
if module in self.remove_imports:
continue
if module in self.as_map:
import_definition = "import {0} as {1}".format(module, self.as_map[module])
else:
import_definition = "import {0}".format(module)
comments_above = self.comments['above']['straight'].pop(module, None)
if comments_above:
section_output.extend(comments_above)
section_output.append(self._add_comments(self.comments['straight'].get(module), import_definition))
def _add_from_imports(self, from_modules, section, section_output, ignore_case):
for module in from_modules:
if module in self.remove_imports:
continue
import_start = "from {0} import ".format(module)
from_imports = self.imports[section]['from'][module]
from_imports = nsorted(from_imports, key=lambda key: self._module_key(key, self.config, True, ignore_case))
if self.remove_imports:
from_imports = [line for line in from_imports if not "{0}.{1}".format(module, line) in
self.remove_imports]
for from_import in copy.copy(from_imports):
submodule = module + "." + from_import
import_as = self.as_map.get(submodule, False)
if import_as:
import_definition = "{0} as {1}".format(from_import, import_as)
if self.config['combine_as_imports'] and not ("*" in from_imports and
self.config['combine_star']):
from_imports[from_imports.index(from_import)] = import_definition
else:
import_statement = import_start + import_definition
force_grid_wrap = self.config['force_grid_wrap']
comments = self.comments['straight'].get(submodule)
import_statement = self._add_comments(comments, self._wrap(import_statement))
from_imports.remove(from_import)
section_output.append(import_statement)
if from_imports:
comments = self.comments['from'].pop(module, ())
if "*" in from_imports and self.config['combine_star']:
import_statement = self._wrap(self._add_comments(comments, "{0}*".format(import_start)))
elif self.config['force_single_line']:
import_statements = []
for from_import in from_imports:
single_import_line = self._add_comments(comments, import_start + from_import)
comment = self.comments['nested'].get(module, {}).pop(from_import, None)
if comment:
single_import_line += "{0} {1}".format(comments and ";" or " #", comment)
import_statements.append(self._wrap(single_import_line))
comments = None
import_statement = "\n".join(import_statements)
else:
star_import = False
if "*" in from_imports:
section_output.append(self._add_comments(comments, "{0}*".format(import_start)))
from_imports.remove('*')
star_import = True
comments = None
for from_import in copy.copy(from_imports):
comment = self.comments['nested'].get(module, {}).pop(from_import, None)
if comment:
single_import_line = self._add_comments(comments, import_start + from_import)
single_import_line += "{0} {1}".format(comments and ";" or " #", comment)
above_comments = self.comments['above']['from'].pop(module, None)
if above_comments:
section_output.extend(above_comments)
section_output.append(self._wrap(single_import_line))
from_imports.remove(from_import)
comments = None
if star_import:
import_statement = import_start + (", ").join(from_imports)
else:
import_statement = self._add_comments(comments, import_start + (", ").join(from_imports))
if not from_imports:
import_statement = ""
do_multiline_reformat = False
force_grid_wrap = self.config['force_grid_wrap']
if force_grid_wrap and len(from_imports) >= force_grid_wrap:
do_multiline_reformat = True
if len(import_statement) > self.config['line_length'] and len(from_imports) > 1:
do_multiline_reformat = True
# If line too long AND have imports AND we are NOT using GRID or VERTICAL wrap modes
if (len(import_statement) > self.config['line_length'] and len(from_imports) > 0 and
self.config['multi_line_output'] not in (1, 0)):
do_multiline_reformat = True
if do_multiline_reformat:
import_statement = self._multi_line_reformat(import_start, from_imports, comments)
if not do_multiline_reformat and len(import_statement) > self.config['line_length']:
import_statement = self._wrap(import_statement)
if import_statement:
above_comments = self.comments['above']['from'].pop(module, None)
if above_comments:
section_output.extend(above_comments)
section_output.append(import_statement)
def _multi_line_reformat(self, import_start, from_imports, comments):
output_mode = settings.WrapModes._fields[self.config['multi_line_output']].lower()
formatter = getattr(self, "_output_" + output_mode, self._output_grid)
dynamic_indent = " " * (len(import_start) + 1)
indent = self.config['indent']
line_length = self.config['wrap_length'] or self.config['line_length']
import_statement = formatter(import_start, copy.copy(from_imports),
dynamic_indent, indent, line_length, comments)
if self.config['balanced_wrapping']:
lines = import_statement.split("\n")
line_count = len(lines)
if len(lines) > 1:
minimum_length = min([len(line) for line in lines[:-1]])
else:
minimum_length = 0
new_import_statement = import_statement
while (len(lines[-1]) < minimum_length and
len(lines) == line_count and line_length > 10):
import_statement = new_import_statement
line_length -= 1
new_import_statement = formatter(import_start, copy.copy(from_imports),
dynamic_indent, indent, line_length, comments)
lines = new_import_statement.split("\n")
if import_statement.count('\n') == 0:
return self._wrap(import_statement)
return import_statement
def _add_formatted_imports(self):
"""Adds the imports back to the file.
(at the index of the first import) sorted alphabetically and split between groups
"""
sort_ignore_case = self.config['force_alphabetical_sort_within_sections']
sections = itertools.chain(self.sections, self.config['forced_separate'])
if self.config['no_sections']:
self.imports['no_sections'] = {'straight': [], 'from': {}}
for section in sections:
self.imports['no_sections']['straight'].extend(self.imports[section].get('straight', []))
self.imports['no_sections']['from'].update(self.imports[section].get('from', {}))
sections = ('no_sections', )
output = []
for section in sections:
straight_modules = self.imports[section]['straight']
straight_modules = nsorted(straight_modules, key=lambda key: self._module_key(key, self.config))
from_modules = self.imports[section]['from']
from_modules = nsorted(from_modules, key=lambda key: self._module_key(key, self.config))
section_output = []
if self.config['from_first']:
self._add_from_imports(from_modules, section, section_output, sort_ignore_case)
if self.config['lines_between_types'] and from_modules and straight_modules:
section_output.extend([''] * self.config['lines_between_types'])
self._add_straight_imports(straight_modules, section, section_output)
else:
self._add_straight_imports(straight_modules, section, section_output)
if self.config['lines_between_types'] and from_modules and straight_modules:
section_output.extend([''] * self.config['lines_between_types'])
self._add_from_imports(from_modules, section, section_output, sort_ignore_case)
if self.config['force_sort_within_sections']:
def by_module(line):
section = 'B'
if line.startswith('#'):
return 'AA'
line = re.sub('^from ', '', line)
line = re.sub('^import ', '', line)
if line.split(' ')[0] in self.config['force_to_top']:
section = 'A'
if not self.config['order_by_type']:
line = line.lower()
return '{0}{1}'.format(section, line)
section_output = nsorted(section_output, key=by_module)
if section_output:
section_name = section
if section_name in self.place_imports:
self.place_imports[section_name] = section_output
continue
section_title = self.config.get('import_heading_' + str(section_name).lower(), '')
if section_title:
section_comment = "# {0}".format(section_title)
if not section_comment in self.out_lines[0:1] and not section_comment in self.in_lines[0:1]:
section_output.insert(0, section_comment)
output += section_output + ([''] * self.config['lines_between_sections'])
while [character.strip() for character in output[-1:]] == [""]:
output.pop()
output_at = 0
if self.import_index < self.original_length:
output_at = self.import_index
elif self._first_comment_index_end != -1 and self._first_comment_index_start <= 2:
output_at = self._first_comment_index_end
self.out_lines[output_at:0] = output
imports_tail = output_at + len(output)
while [character.strip() for character in self.out_lines[imports_tail: imports_tail + 1]] == [""]:
self.out_lines.pop(imports_tail)
if len(self.out_lines) > imports_tail:
next_construct = ""
self._in_quote = False
tail = self.out_lines[imports_tail:]
for index, line in enumerate(tail):
if not self._skip_line(line) and line.strip():
if line.strip().startswith("#") and len(tail) > (index + 1) and tail[index + 1].strip():
continue
next_construct = line
break
if self.config['lines_after_imports'] != -1:
self.out_lines[imports_tail:0] = ["" for line in range(self.config['lines_after_imports'])]
elif next_construct.startswith("def") or next_construct.startswith("class") or \
next_construct.startswith("@") or next_construct.startswith("async def"):
self.out_lines[imports_tail:0] = ["", ""]
else:
self.out_lines[imports_tail:0] = [""]
if self.place_imports:
new_out_lines = []
for index, line in enumerate(self.out_lines):
new_out_lines.append(line)
if line in self.import_placements:
new_out_lines.extend(self.place_imports[self.import_placements[line]])
if len(self.out_lines) <= index or self.out_lines[index + 1].strip() != "":
new_out_lines.append("")
self.out_lines = new_out_lines
def _output_grid(self, statement, imports, white_space, indent, line_length, comments):
statement += "(" + imports.pop(0)
while imports:
next_import = imports.pop(0)
next_statement = self._add_comments(comments, statement + ", " + next_import)
if len(next_statement.split("\n")[-1]) + 1 > line_length:
lines = ['{0}{1}'.format(white_space, next_import.split(" ")[0])]
for part in next_import.split(" ")[1:]:
new_line = '{0} {1}'.format(lines[-1], part)
if len(new_line) + 1 > line_length:
lines.append('{0}{1}'.format(white_space, part))
else:
lines[-1] = new_line
next_import = '\n'.join(lines)
statement = (self._add_comments(comments, "{0},".format(statement)) +
"\n{0}".format(next_import))
comments = None
else:
statement += ", " + next_import
return statement + ("," if self.config['include_trailing_comma'] else "") + ")"
def _output_vertical(self, statement, imports, white_space, indent, line_length, comments):
first_import = self._add_comments(comments, imports.pop(0) + ",") + "\n" + white_space
return "{0}({1}{2}{3})".format(
statement,
first_import,
(",\n" + white_space).join(imports),
"," if self.config['include_trailing_comma'] else "",
)
def _output_hanging_indent(self, statement, imports, white_space, indent, line_length, comments):
statement += imports.pop(0)
while imports:
next_import = imports.pop(0)
next_statement = self._add_comments(comments, statement + ", " + next_import)
if len(next_statement.split("\n")[-1]) + 3 > line_length:
next_statement = (self._add_comments(comments, "{0}, \\".format(statement)) +
"\n{0}{1}".format(indent, next_import))
comments = None
statement = next_statement
return statement
def _output_vertical_hanging_indent(self, statement, imports, white_space, indent, line_length, comments):
return "{0}({1}\n{2}{3}{4}\n)".format(
statement,
self._add_comments(comments),
indent,
(",\n" + indent).join(imports),
"," if self.config['include_trailing_comma'] else "",
)
def _output_vertical_grid_common(self, statement, imports, white_space, indent, line_length, comments):
statement += self._add_comments(comments, "(") + "\n" + indent + imports.pop(0)
while imports:
next_import = imports.pop(0)
next_statement = "{0}, {1}".format(statement, next_import)
if len(next_statement.split("\n")[-1]) + 1 > line_length:
next_statement = "{0},\n{1}{2}".format(statement, indent, next_import)
statement = next_statement
if self.config['include_trailing_comma']:
statement += ','
return statement
def _output_vertical_grid(self, statement, imports, white_space, indent, line_length, comments):
return self._output_vertical_grid_common(statement, imports, white_space, indent, line_length, comments) + ")"
def _output_vertical_grid_grouped(self, statement, imports, white_space, indent, line_length, comments):
return self._output_vertical_grid_common(statement, imports, white_space, indent, line_length, comments) + "\n)"
def _output_noqa(self, statement, imports, white_space, indent, line_length, comments):
retval = '{0}{1}'.format(statement, ', '.join(imports))
comment_str = ' '.join(comments)
if comments:
if len(retval) + 4 + len(comment_str) <= line_length:
return '{0} # {1}'.format(retval, comment_str)
else:
if len(retval) <= line_length:
return retval
if comments:
if "NOQA" in comments:
return '{0} # {1}'.format(retval, comment_str)
else:
return '{0} # NOQA {1}'.format(retval, comment_str)
else:
return '{0} # NOQA'.format(retval)
@staticmethod
def _strip_comments(line, comments=None):
"""Removes comments from import line."""
if comments is None:
comments = []
new_comments = False
comment_start = line.find("#")
if comment_start != -1:
comments.append(line[comment_start + 1:].strip())
new_comments = True
line = line[:comment_start]
return line, comments, new_comments
@staticmethod
def _format_simplified(import_line):
import_line = import_line.strip()
if import_line.startswith("from "):
import_line = import_line.replace("from ", "")
import_line = import_line.replace(" import ", ".")
elif import_line.startswith("import "):
import_line = import_line.replace("import ", "")
return import_line
@staticmethod
def _format_natural(import_line):
import_line = import_line.strip()
if not import_line.startswith("from ") and not import_line.startswith("import "):
if not "." in import_line:
return "import {0}".format(import_line)
parts = import_line.split(".")
end = parts.pop(-1)
return "from {0} import {1}".format(".".join(parts), end)
return import_line
def _skip_line(self, line):
skip_line = self._in_quote
if self.index == 1 and line.startswith("#"):
self._in_top_comment = True
return True
elif self._in_top_comment:
if not line.startswith("#"):
self._in_top_comment = False
self._first_comment_index_end = self.index - 1
if '"' in line or "'" in line:
index = 0
if self._first_comment_index_start == -1 and (line.startswith('"') or line.startswith("'")):
self._first_comment_index_start = self.index
while index < len(line):
if line[index] == "\\":
index += 1
elif self._in_quote:
if line[index:index + len(self._in_quote)] == self._in_quote:
self._in_quote = False
if self._first_comment_index_end < self._first_comment_index_start:
self._first_comment_index_end = self.index
elif line[index] in ("'", '"'):
long_quote = line[index:index + 3]
if long_quote in ('"""', "'''"):
self._in_quote = long_quote
index += 2
else:
self._in_quote = line[index]
elif line[index] == "#":
break
index += 1
return skip_line or self._in_quote or self._in_top_comment
def _strip_syntax(self, import_string):
import_string = import_string.replace("_import", "[[i]]")
for remove_syntax in ['\\', '(', ')', ',']:
import_string = import_string.replace(remove_syntax, " ")
import_list = import_string.split()
for key in ('from', 'import'):
if key in import_list:
import_list.remove(key)
import_string = ' '.join(import_list)
import_string = import_string.replace("[[i]]", "_import")
return import_string.replace("{ ", "{|").replace(" }", "|}")
def _parse(self):
    """Parses a python file taking out and categorizing imports.

    Walks the input line by line, records non-import lines in
    ``self.out_lines``, and files every import into ``self.imports``
    (per section, per type), tracking associated comments, ``as``
    aliases and placement directives along the way.
    """
    self._in_quote = False
    self._in_top_comment = False
    while not self._at_end():
        line = self._get_line()
        statement_index = self.index
        skip_line = self._skip_line(line)

        if line in self._section_comments and not skip_line:
            if self.import_index == -1:
                self.import_index = self.index - 1
            continue

        if "isort:imports-" in line and line.startswith("#"):
            # Explicit placement directive: remember which section goes here.
            section = line.split("isort:imports-")[-1].split()[0].upper()
            self.place_imports[section] = []
            self.import_placements[line] = section

        if ";" in line:
            # A compound statement mixing imports with other code is left alone.
            for part in (part.strip() for part in line.split(";")):
                if part and not part.startswith("from ") and not part.startswith("import "):
                    skip_line = True

        import_type = self._import_type(line)
        if not import_type or skip_line:
            self.out_lines.append(line)
            continue

        for line in (line.strip() for line in line.split(";")):
            import_type = self._import_type(line)
            if not import_type:
                self.out_lines.append(line)
                continue

            line = line.replace("\t", " ").replace('import*', 'import *')
            if self.import_index == -1:
                self.import_index = self.index - 1

            nested_comments = {}
            import_string, comments, new_comments = self._strip_comments(line)
            stripped_line = [part for part in self._strip_syntax(import_string).strip().split(" ") if part]
            if import_type == "from" and len(stripped_line) == 2 and stripped_line[1] != "*" and new_comments:
                nested_comments[stripped_line[-1]] = comments[0]

            if "(" in line.split("#")[0] and not self._at_end():
                # Parenthesized multi-line import: consume until ")".
                while not line.strip().endswith(")") and not self._at_end():
                    line, comments, new_comments = self._strip_comments(self._get_line(), comments)
                    stripped_line = self._strip_syntax(line).strip()
                    if import_type == "from" and stripped_line and not " " in stripped_line and new_comments:
                        nested_comments[stripped_line] = comments[-1]
                    import_string += "\n" + line
            else:
                # Backslash-continued import: consume continuation lines.
                while line.strip().endswith("\\"):
                    line, comments, new_comments = self._strip_comments(self._get_line(), comments)
                    stripped_line = self._strip_syntax(line).strip()
                    if import_type == "from" and stripped_line and not " " in stripped_line and new_comments:
                        nested_comments[stripped_line] = comments[-1]
                    if import_string.strip().endswith(" import") or line.strip().startswith("import "):
                        import_string += "\n" + line
                    else:
                        import_string = import_string.rstrip().rstrip("\\") + " " + line.lstrip()

            if import_type == "from":
                import_string = import_string.replace("import(", "import (")
                parts = import_string.split(" import ")
                from_import = parts[0].split(" ")
                import_string = " import ".join([from_import[0] + " " + "".join(from_import[1:])] + parts[1:])

            imports = [item.replace("{|", "{ ").replace("|}", " }") for item in
                       self._strip_syntax(import_string).split()]
            if "as" in imports and (imports.index('as') + 1) < len(imports):
                # Peel off every "x as y" pair into the alias map.
                while "as" in imports:
                    index = imports.index('as')
                    if import_type == "from":
                        module = imports[0] + "." + imports[index - 1]
                        self.as_map[module] = imports[index + 1]
                    else:
                        module = imports[index - 1]
                        self.as_map[module] = imports[index + 1]
                    if not self.config['combine_as_imports']:
                        self.comments['straight'][module] = comments
                        comments = []
                    del imports[index:index + 2]

            if import_type == "from":
                import_from = imports.pop(0)
                placed_module = self.place_module(import_from)
                if placed_module == '':
                    print(
                        "WARNING: could not place module {0} of line {1} --"
                        " Do you need to define a default section?".format(import_from, line)
                    )
                root = self.imports[placed_module][import_type]
                for import_name in imports:
                    associated_comment = nested_comments.get(import_name)
                    if associated_comment:
                        self.comments['nested'].setdefault(import_from, {})[import_name] = associated_comment
                        comments.pop(comments.index(associated_comment))
                if comments:
                    self.comments['from'].setdefault(import_from, []).extend(comments)

                # Pull immediately-preceding comment lines out of out_lines and
                # attach them "above" this from-import.
                if len(self.out_lines) > max(self.import_index, self._first_comment_index_end + 1, 1) - 1:
                    last = self.out_lines and self.out_lines[-1].rstrip() or ""
                    while (last.startswith("#") and not last.endswith('"""') and not last.endswith("'''") and not
                           'isort:imports-' in last):
                        self.comments['above']['from'].setdefault(import_from, []).insert(0, self.out_lines.pop(-1))
                        if len(self.out_lines) > max(self.import_index - 1, self._first_comment_index_end + 1, 1) - 1:
                            last = self.out_lines[-1].rstrip()
                        else:
                            last = ""
                    if statement_index - 1 == self.import_index:
                        self.import_index -= len(self.comments['above']['from'].get(import_from, []))

                if root.get(import_from, False):
                    root[import_from].update(imports)
                else:
                    root[import_from] = OrderedSet(imports)
            else:
                for module in imports:
                    if comments:
                        self.comments['straight'][module] = comments
                        comments = None

                    if len(self.out_lines) > max(self.import_index, self._first_comment_index_end + 1, 1) - 1:
                        last = self.out_lines and self.out_lines[-1].rstrip() or ""
                        while (last.startswith("#") and not last.endswith('"""') and not last.endswith("'''") and
                               not 'isort:imports-' in last):
                            self.comments['above']['straight'].setdefault(module, []).insert(0,
                                                                                            self.out_lines.pop(-1))
                            if len(self.out_lines) > 0:
                                last = self.out_lines[-1].rstrip()
                            else:
                                last = ""
                        if self.index - 1 == self.import_index:
                            self.import_index -= len(self.comments['above']['straight'].get(module, []))
                    placed_module = self.place_module(module)
                    if placed_module == '':
                        # NOTE(review): this formats import_from in the straight-import
                        # branch; if no prior "from" import was parsed this would raise
                        # NameError — present in upstream isort too, left unchanged.
                        print(
                            "WARNING: could not place module {0} of line {1} --"
                            " Do you need to define a default section?".format(import_from, line)
                        )
                    self.imports[placed_module][import_type].add(module)
def coding_check(fname, default='utf-8'):
    """Return the source encoding declared in *fname*, or *default*.

    Scans the first few lines of the file in binary mode for a
    ``coding[:=] name`` declaration.

    see https://www.python.org/dev/peps/pep-0263/
    """
    declaration = re.compile(br'coding[:=]\s*([-\w.]+)')
    with io.open(fname, 'rb') as stream:
        for line_number, raw_line in enumerate(stream, 1):
            found = re.findall(declaration, raw_line)
            if found:
                return found[0].decode('ascii')
            if line_number > 2:
                break
    return default
def get_stdlib_path():
    """Return the path to the standard library of the current installation.

    The old Python-2.6 fallback (``os.path.join(sys.prefix, 'lib')``) and
    its version check were removed along with Python 2 support — the
    original docstring itself noted the check could be dropped once 2.6
    was no longer supported, and ``sysconfig`` is always available now.
    """
    import sysconfig
    return sysconfig.get_paths()['stdlib']
def exists_case_sensitive(path):
    """
    Returns if the given path exists and also matches the case on Windows.

    When finding files that can be imported, it is important for the cases to match because while
    file os.path.exists("module.py") and os.path.exists("MODULE.py") both return True on Windows, Python
    can only import using the case of the real file.
    """
    if not os.path.exists(path):
        return False
    if sys.platform.startswith('win'):
        # Windows paths are case-insensitive: confirm against the real
        # directory listing so the casing actually matches.
        parent, name = os.path.split(path)
        return name in os.listdir(parent)
    return True

View file

@ -1,296 +0,0 @@
#! /usr/bin/env python
''' Tool for sorting imports alphabetically, and automatically separated into sections.
Copyright (C) 2013 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
'''
from __future__ import absolute_import, division, print_function, unicode_literals
import argparse
import glob
import os
import sys
import setuptools
from isort import SortImports, __version__
from isort.settings import DEFAULT_SECTIONS, default, from_path, should_skip
from .pie_slice import itemsview
INTRO = r"""
/#######################################################################\
`sMMy`
.yyyy- `
##soos## ./o.
` ``..-..` ``...`.`` ` ```` ``-ssso```
.s:-y- .+osssssso/. ./ossss+:so+:` :+o-`/osso:+sssssssso/
.s::y- osss+.``.`` -ssss+-.`-ossso` ssssso/::..::+ssss:::.
.s::y- /ssss+//:-.` `ssss+ `ssss+ sssso` :ssss`
.s::y- `-/+oossssso/ `ssss/ sssso ssss/ :ssss`
.y-/y- ````:ssss` ossso. :ssss: ssss/ :ssss.
`/so:` `-//::/osss+ `+ssss+-/ossso: /sso- `osssso/.
\/ `-/oooo++/- .:/++:/++/-` .. `://++/.
isort your Python imports for you so you don't have to
VERSION {0}
\########################################################################/
""".format(__version__)
def iter_source_code(paths, config, skipped):
    """Yield every Python source file reachable from *paths*.

    Non-directory entries are yielded as-is.  Directories are walked
    recursively; anything matching the skip configuration is recorded in
    *skipped* (and pruned from the walk for directories) instead of
    being yielded.
    """
    for entry in paths:
        if not os.path.isdir(entry):
            yield entry
            continue
        if should_skip(entry, config, os.getcwd()):
            skipped.append(entry)
            continue
        for root, subdirs, files in os.walk(entry, topdown=True):
            # Prune skipped directories in place so os.walk never descends.
            for subdir in list(subdirs):
                if should_skip(subdir, config, root):
                    skipped.append(subdir)
                    subdirs.remove(subdir)
            for name in files:
                if not name.endswith('.py'):
                    continue
                if should_skip(name, config, root):
                    skipped.append(name)
                else:
                    yield os.path.join(root, name)
class ISortCommand(setuptools.Command):
    """The :class:`ISortCommand` class is used by setuptools to perform
    imports checks on registered modules.
    """
    description = "Run isort on modules registered in setuptools"
    user_options = []

    def initialize_options(self):
        # Seed every default isort setting as an attribute on the command.
        default_settings = default.copy()
        for (key, value) in itemsview(default_settings):
            setattr(self, key, value)

    def finalize_options(self):
        "Get options from config files."
        self.arguments = {}
        computed_settings = from_path(os.getcwd())
        for (key, value) in itemsview(computed_settings):
            self.arguments[key] = value

    def distribution_files(self):
        """Find distribution packages."""
        # This is verbatim from flake8
        if self.distribution.packages:
            package_dirs = self.distribution.package_dir or {}
            for package in self.distribution.packages:
                pkg_dir = package
                if package in package_dirs:
                    pkg_dir = package_dirs[package]
                elif '' in package_dirs:
                    pkg_dir = package_dirs[''] + os.path.sep + pkg_dir
                yield pkg_dir.replace('.', os.path.sep)

        if self.distribution.py_modules:
            for filename in self.distribution.py_modules:
                yield "%s.py" % filename
        # Don't miss the setup.py file itself
        yield "setup.py"

    def run(self):
        # Check-only pass over every distributed .py file; exit non-zero
        # if any file has incorrectly sorted imports.
        arguments = self.arguments
        wrong_sorted_files = False
        arguments['check'] = True
        for path in self.distribution_files():
            for python_file in glob.iglob(os.path.join(path, '*.py')):
                try:
                    incorrectly_sorted = SortImports(python_file, **arguments).incorrectly_sorted
                    if incorrectly_sorted:
                        wrong_sorted_files = True
                except IOError as e:
                    print("WARNING: Unable to parse file {0} due to {1}".format(python_file, e))
        if wrong_sorted_files:
            exit(1)
def create_parser():
    """Build the isort command line interface and parse ``sys.argv``.

    Despite the name, this returns the parsed options as a dict holding
    only the options that were actually supplied (falsy values are
    dropped), not the parser object itself.
    """
    parser = argparse.ArgumentParser(description='Sort Python import definitions alphabetically '
                                                 'within logical sections.')
    parser.add_argument('files', nargs='*', help='One or more Python source files that need their imports sorted.')
    parser.add_argument('-y', '--apply', dest='apply', action='store_true',
                        help='Tells isort to apply changes recursively without asking')
    parser.add_argument('-l', '--lines', help='[Deprecated] The max length of an import line (used for wrapping '
                        'long imports).',
                        dest='line_length', type=int)
    parser.add_argument('-w', '--line-width', help='The max length of an import line (used for wrapping long imports).',
                        dest='line_length', type=int)
    parser.add_argument('-s', '--skip', help='Files that sort imports should skip over. If you want to skip multiple '
                        'files you should specify twice: --skip file1 --skip file2.', dest='skip', action='append')
    parser.add_argument('-ns', '--dont-skip', help='Files that sort imports should never skip over.',
                        dest='not_skip', action='append')
    parser.add_argument('-sg', '--skip-glob', help='Files that sort imports should skip over.', dest='skip_glob',
                        action='append')
    parser.add_argument('-t', '--top', help='Force specific imports to the top of their appropriate section.',
                        dest='force_to_top', action='append')
    parser.add_argument('-f', '--future', dest='known_future_library', action='append',
                        help='Force sortImports to recognize a module as part of the future compatibility libraries.')
    parser.add_argument('-b', '--builtin', dest='known_standard_library', action='append',
                        help='Force sortImports to recognize a module as part of the python standard library.')
    parser.add_argument('-o', '--thirdparty', dest='known_third_party', action='append',
                        help='Force sortImports to recognize a module as being part of a third party library.')
    parser.add_argument('-p', '--project', dest='known_first_party', action='append',
                        help='Force sortImports to recognize a module as being part of the current python project.')
    parser.add_argument('--virtual-env', dest='virtual_env',
                        help='Virtual environment to use for determining whether a package is third-party')
    parser.add_argument('-m', '--multi-line', dest='multi_line_output', type=int, choices=[0, 1, 2, 3, 4, 5],
                        help='Multi line output (0-grid, 1-vertical, 2-hanging, 3-vert-hanging, 4-vert-grid, '
                        '5-vert-grid-grouped).')
    parser.add_argument('-i', '--indent', help='String to place for indents defaults to "    " (4 spaces).',
                        dest='indent', type=str)
    parser.add_argument('-a', '--add-import', dest='add_imports', action='append',
                        help='Adds the specified import line to all files, '
                             'automatically determining correct placement.')
    parser.add_argument('-af', '--force-adds', dest='force_adds', action='store_true',
                        help='Forces import adds even if the original file is empty.')
    parser.add_argument('-r', '--remove-import', dest='remove_imports', action='append',
                        help='Removes the specified import from all files.')
    parser.add_argument('-ls', '--length-sort', help='Sort imports by their string length.',
                        dest='length_sort', action='store_true')
    parser.add_argument('-d', '--stdout', help='Force resulting output to stdout, instead of in-place.',
                        dest='write_to_stdout', action='store_true')
    parser.add_argument('-c', '--check-only', action='store_true', dest="check",
                        help='Checks the file for unsorted / unformatted imports and prints them to the '
                             'command line without modifying the file.')
    parser.add_argument('-ws', '--ignore-whitespace', action='store_true', dest="ignore_whitespace",
                        help='Tells isort to ignore whitespace differences when --check-only is being used.')
    parser.add_argument('-sl', '--force-single-line-imports', dest='force_single_line', action='store_true',
                        help='Forces all from imports to appear on their own line')
    parser.add_argument('-ds', '--no-sections', help='Put all imports into the same section bucket', dest='no_sections',
                        action='store_true')
    parser.add_argument('-sd', '--section-default', dest='default_section',
                        help='Sets the default section for imports (by default FIRSTPARTY) options: ' +
                        str(DEFAULT_SECTIONS))
    parser.add_argument('-df', '--diff', dest='show_diff', action='store_true',
                        help="Prints a diff of all the changes isort would make to a file, instead of "
                             "changing it in place")
    parser.add_argument('-e', '--balanced', dest='balanced_wrapping', action='store_true',
                        help='Balances wrapping to produce the most consistent line length possible')
    parser.add_argument('-rc', '--recursive', dest='recursive', action='store_true',
                        help='Recursively look for Python files of which to sort imports')
    parser.add_argument('-ot', '--order-by-type', dest='order_by_type',
                        action='store_true', help='Order imports by type in addition to alphabetically')
    parser.add_argument('-dt', '--dont-order-by-type', dest='dont_order_by_type',
                        action='store_true', help='Only order imports alphabetically, do not attempt type ordering')
    parser.add_argument('-ac', '--atomic', dest='atomic', action='store_true',
                        help="Ensures the output doesn't save if the resulting file contains syntax errors.")
    parser.add_argument('-cs', '--combine-star', dest='combine_star', action='store_true',
                        help="Ensures that if a star import is present, nothing else is imported from that namespace.")
    parser.add_argument('-ca', '--combine-as', dest='combine_as_imports', action='store_true',
                        help="Combines as imports on the same line.")
    parser.add_argument('-tc', '--trailing-comma', dest='include_trailing_comma', action='store_true',
                        help='Includes a trailing comma on multi line imports that include parentheses.')
    parser.add_argument('-v', '--version', action='store_true', dest='show_version')
    parser.add_argument('-vb', '--verbose', action='store_true', dest="verbose",
                        help='Shows verbose output, such as when files are skipped or when a check is successful.')
    parser.add_argument('-q', '--quiet', action='store_true', dest="quiet",
                        help='Shows extra quiet output, only errors are outputted.')
    parser.add_argument('-sp', '--settings-path', dest="settings_path",
                        help='Explicitly set the settings path instead of auto determining based on file location.')
    parser.add_argument('-ff', '--from-first', dest='from_first',
                        help="Switches the typical ordering preference, showing from imports first then straight ones.")
    parser.add_argument('-wl', '--wrap-length', dest='wrap_length',
                        help="Specifies how long lines that are wrapped should be, if not set line_length is used.")
    parser.add_argument('-fgw', '--force-grid-wrap', nargs='?', const=2, type=int, dest="force_grid_wrap",
                        help='Force number of from imports (defaults to 2) to be grid wrapped regardless of line '
                             'length')
    parser.add_argument('-fass', '--force-alphabetical-sort-within-sections', action='store_true',
                        dest="force_alphabetical_sort", help='Force all imports to be sorted alphabetically within a '
                                                             'section')
    parser.add_argument('-fas', '--force-alphabetical-sort', action='store_true', dest="force_alphabetical_sort",
                        help='Force all imports to be sorted as a single section')
    parser.add_argument('-fss', '--force-sort-within-sections', action='store_true', dest="force_sort_within_sections",
                        help='Force imports to be sorted by module, independent of import_type')
    parser.add_argument('-lbt', '--lines-between-types', dest='lines_between_types', type=int)
    parser.add_argument('-up', '--use-parentheses', dest='use_parentheses', action='store_true',
                        help='Use parenthesis for line continuation on lenght limit instead of slashes.')

    # Keep only the options that were actually given on the command line.
    arguments = dict((key, value) for (key, value) in itemsview(vars(parser.parse_args())) if value)
    if 'dont_order_by_type' in arguments:
        arguments['order_by_type'] = False
    return arguments
def main():
    """Command line entry point: parse options and run isort over the targets.

    Reads from stdin when the single file argument is ``-``; otherwise
    resolves settings from the first path and sorts each file, exiting
    with status 1 when --check-only finds unsorted imports.
    """
    arguments = create_parser()
    if arguments.get('show_version'):
        print(INTRO)
        return

    if 'settings_path' in arguments:
        sp = arguments['settings_path']
        # Settings path must be a directory; fall back to the file's directory.
        arguments['settings_path'] = os.path.abspath(sp) if os.path.isdir(sp) else os.path.dirname(os.path.abspath(sp))

    file_names = arguments.pop('files', [])
    if file_names == ['-']:
        SortImports(file_contents=sys.stdin.read(), write_to_stdout=True, **arguments)
    else:
        if not file_names:
            # No targets given: recurse from the current directory, asking
            # before applying unless --apply was passed.
            file_names = ['.']
            arguments['recursive'] = True
            if not arguments.get('apply', False):
                arguments['ask_to_apply'] = True
        config = from_path(os.path.abspath(file_names[0]) or os.getcwd()).copy()
        config.update(arguments)
        wrong_sorted_files = False
        skipped = []
        if arguments.get('recursive', False):
            file_names = iter_source_code(file_names, config, skipped)
        num_skipped = 0
        if config['verbose'] or config.get('show_logo', False):
            print(INTRO)
        for file_name in file_names:
            try:
                sort_attempt = SortImports(file_name, **arguments)
                incorrectly_sorted = sort_attempt.incorrectly_sorted
                if arguments.get('check', False) and incorrectly_sorted:
                    wrong_sorted_files = True
                if sort_attempt.skipped:
                    num_skipped += 1
            except IOError as e:
                print("WARNING: Unable to parse file {0} due to {1}".format(file_name, e))
        if wrong_sorted_files:
            exit(1)

        num_skipped += len(skipped)
        if num_skipped and not arguments.get('quiet', False):
            if config['verbose']:
                for was_skipped in skipped:
                    print("WARNING: {0} was skipped as it's listed in 'skip' setting"
                          " or matches a glob in 'skip_glob' setting".format(was_skipped))
            print("Skipped {0} files".format(num_skipped))
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()

View file

@ -1,47 +0,0 @@
"""isort/natural.py.
Enables sorting strings that contain numbers naturally
usage:
natural.nsorted(list)
Copyright (C) 2013 Timothy Edmund Crosley
Implementation originally from @HappyLeapSecond stack overflow user in response to:
http://stackoverflow.com/questions/5967500/how-to-correctly-sort-a-string-with-a-number-inside
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import re
def _atoi(text):
    """Return *text* as an ``int`` when it is purely digits, else unchanged."""
    if text.isdigit():
        return int(text)
    return text
def _natural_keys(text):
    """Split *text* into alternating string/int chunks for natural sorting.

    Digit runs become ints so that e.g. "item10" sorts after "item2".
    """
    return [int(chunk) if chunk.isdigit() else chunk
            for chunk in re.split(r'(\d+)', text)]
def nsorted(to_sort, key=None):
    """Returns a naturally sorted list"""
    # Without a key, sort directly on the natural decomposition; with one,
    # decompose whatever the caller's key extracts.
    extract = _natural_keys if key is None else (lambda item: _natural_keys(key(item)))
    return sorted(to_sort, key=extract)

View file

@ -1,594 +0,0 @@
"""pie_slice/overrides.py.
Overrides Python syntax to conform to the Python3 version as much as possible using a '*' import
Copyright (C) 2013 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import
import abc
import collections
import functools
import sys
from numbers import Integral
__version__ = "1.1.0"

# Interpreter-version flags used throughout this compatibility shim.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
VERSION = sys.version_info

# Aliases for the builtins this module may shadow below, so the original
# implementations remain reachable under their "native_" names.
native_dict = dict
native_round = round
native_filter = filter
native_map = map
native_zip = zip
native_range = range
native_str = str
native_chr = chr
native_input = input
native_next = next
native_object = object

# Names exported on both Python 2 and Python 3 (extended per-version below).
common = ['native_dict', 'native_round', 'native_filter', 'native_map', 'native_range', 'native_str', 'native_chr',
          'native_input', 'PY2', 'PY3', 'u', 'itemsview', 'valuesview', 'keysview', 'execute', 'integer_types',
          'native_next', 'native_object', 'with_metaclass', 'OrderedDict', 'lru_cache']
def with_metaclass(meta, *bases):
    """Enables use of meta classes across Python Versions. taken from jinja2/_compat.py.

    Use it like this::

        class BaseForm(object):
            pass

        class FormType(type):
            pass

        class Form(with_metaclass(FormType, BaseForm)):
            pass
    """
    class _TemporaryMeta(meta):
        # Keep type's own call/init so the placeholder class behaves like a
        # plain type until it is subclassed.
        __call__ = type.__call__
        __init__ = type.__init__

        def __new__(mcs, name, this_bases, namespace):
            if this_bases is None:
                # First call: build the throwaway placeholder class itself.
                return type.__new__(mcs, name, (), namespace)
            # Subclassing the placeholder: create the real class with the
            # requested metaclass and the originally supplied bases.
            return meta(name, bases, namespace)

    return _TemporaryMeta('temporary_class', None, {})
def unmodified_isinstance(*bases):
    """When called in the form

    MyOverrideClass(unmodified_isinstance(BuiltInClass))

    it allows isinstance() calls against plain built-in instances to pass
    even when they are not a subclass of the override class.
    """
    class UnmodifiedIsInstance(type):
        # Python <= 2.6 lacked reliable __instancecheck__ dispatch for
        # old-style instances, hence the manual subtype walk below.
        if sys.version_info[0] == 2 and sys.version_info[1] <= 6:

            @classmethod
            def __instancecheck__(cls, instance):
                if cls.__name__ in (str(base.__name__) for base in bases):
                    return isinstance(instance, bases)

                subclass = getattr(instance, '__class__', None)
                subtype = type(instance)
                instance_type = getattr(abc, '_InstanceType', None)
                if not instance_type:

                    class test_object:
                        pass

                    instance_type = type(test_object)
                if subtype is instance_type:
                    subtype = subclass
                if subtype is subclass or subclass is None:
                    return cls.__subclasscheck__(subtype)
                return (cls.__subclasscheck__(subclass) or cls.__subclasscheck__(subtype))
        else:
            @classmethod
            def __instancecheck__(cls, instance):
                # Defer to the base classes' own check when asked about them
                # directly; otherwise use the default type machinery.
                if cls.__name__ in (str(base.__name__) for base in bases):
                    return isinstance(instance, bases)
                return type.__instancecheck__(cls, instance)

    return with_metaclass(UnmodifiedIsInstance, *bases)
# Version split: the PY3 branch is thin aliasing; the (now dead on modern
# interpreters) Python 2 branch rebuilds Python-3 semantics on Python 2.
if PY3:
    import urllib
    import builtins
    from urllib import parse

    input = input
    integer_types = (int, )

    def u(string):
        return string

    def itemsview(collection):
        return collection.items()

    def valuesview(collection):
        return collection.values()

    def keysview(collection):
        return collection.keys()

    # Graft the Python-2 style urllib helper names onto urllib.parse.
    urllib.quote = parse.quote
    urllib.quote_plus = parse.quote_plus
    urllib.unquote = parse.unquote
    urllib.unquote_plus = parse.unquote_plus
    urllib.urlencode = parse.urlencode
    execute = getattr(builtins, 'exec')
    if VERSION[1] < 2:
        # Python 3.0/3.1 removed the callable() builtin; re-add it.
        def callable(entity):
            return hasattr(entity, '__call__')
        common.append('callable')

    __all__ = common + ['urllib']
else:
    from itertools import ifilter as filter
    from itertools import imap as map
    from itertools import izip as zip
    from decimal import Decimal, ROUND_HALF_EVEN
    import codecs

    str = unicode
    chr = unichr
    input = raw_input
    range = xrange
    integer_types = (int, long)

    import sys
    stdout = sys.stdout
    stderr = sys.stderr
    # reload(sys)
    # sys.stdout = stdout
    # sys.stderr = stderr
    # sys.setdefaultencoding('utf-8')

    def _create_not_allowed(name):
        # Build a stand-in that raises NameError for removed builtins.
        def _not_allow(*args, **kwargs):
            raise NameError("name '{0}' is not defined".format(name))
        _not_allow.__name__ = name
        return _not_allow

    for removed in ('apply', 'cmp', 'coerce', 'execfile', 'raw_input', 'unpacks'):
        globals()[removed] = _create_not_allowed(removed)

    def u(s):
        if isinstance(s, unicode):
            return s
        else:
            return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")

    def execute(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")

    class _dict_view_base(object):
        # Shared plumbing for the dict view wrappers below.
        __slots__ = ('_dictionary', )

        def __init__(self, dictionary):
            self._dictionary = dictionary

        def __repr__(self):
            return "{0}({1})".format(self.__class__.__name__, str(list(self.__iter__())))

        def __unicode__(self):
            return str(self.__repr__())

        def __str__(self):
            return str(self.__unicode__())

    class dict_keys(_dict_view_base):
        __slots__ = ()

        def __iter__(self):
            return self._dictionary.iterkeys()

    class dict_values(_dict_view_base):
        __slots__ = ()

        def __iter__(self):
            return self._dictionary.itervalues()

    class dict_items(_dict_view_base):
        __slots__ = ()

        def __iter__(self):
            return self._dictionary.iteritems()

    def itemsview(collection):
        return dict_items(collection)

    def valuesview(collection):
        return dict_values(collection)

    def keysview(collection):
        return dict_keys(collection)

    class dict(unmodified_isinstance(native_dict)):
        def has_key(self, *args, **kwargs):
            # NOTE(review): upstream returns (rather than raises) this
            # AttributeError; kept verbatim.
            return AttributeError("'dict' object has no attribute 'has_key'")

        def items(self):
            return dict_items(self)

        def keys(self):
            return dict_keys(self)

        def values(self):
            return dict_values(self)

    def round(number, ndigits=None):
        # Python-3 style banker's rounding built on Decimal.
        return_int = False
        if ndigits is None:
            return_int = True
            ndigits = 0
        if hasattr(number, '__round__'):
            return number.__round__(ndigits)

        if ndigits < 0:
            raise NotImplementedError('negative ndigits not supported yet')
        exponent = Decimal('10') ** (-ndigits)
        d = Decimal.from_float(number).quantize(exponent,
                                                rounding=ROUND_HALF_EVEN)
        if return_int:
            return int(d)
        else:
            return float(d)

    def next(iterator):
        try:
            iterator.__next__()
        except Exception:
            native_next(iterator)

    class FixStr(type):
        # Rewrites __str__ so classes written Python-3 style still return
        # encoded bytes from str() on Python 2.
        def __new__(cls, name, bases, dct):
            if '__str__' in dct:
                dct['__unicode__'] = dct['__str__']
                dct['__str__'] = lambda self: self.__unicode__().encode('utf-8')
            return type.__new__(cls, name, bases, dct)

        if sys.version_info[1] <= 6:
            def __instancecheck__(cls, instance):
                if cls.__name__ == "object":
                    return isinstance(instance, native_object)

                subclass = getattr(instance, '__class__', None)
                subtype = type(instance)
                instance_type = getattr(abc, '_InstanceType', None)
                if not instance_type:

                    class test_object:
                        pass

                    instance_type = type(test_object)
                if subtype is instance_type:
                    subtype = subclass
                if subtype is subclass or subclass is None:
                    return cls.__subclasscheck__(subtype)
                return (cls.__subclasscheck__(subclass) or cls.__subclasscheck__(subtype))
        else:
            def __instancecheck__(cls, instance):
                if cls.__name__ == "object":
                    return isinstance(instance, native_object)
                return type.__instancecheck__(cls, instance)

    class object(with_metaclass(FixStr, object)):
        pass

    __all__ = common + ['round', 'dict', 'apply', 'cmp', 'coerce', 'execfile', 'raw_input', 'unpacks', 'str', 'chr',
                        'input', 'range', 'filter', 'map', 'zip', 'object']
# OrderedDict backport for Python < 2.7 (dead on modern interpreters);
# otherwise use the stdlib implementation.
if sys.version_info[0] == 2 and sys.version_info[1] < 7:
    # OrderedDict
    # Copyright (c) 2009 Raymond Hettinger
    #
    # Permission is hereby granted, free of charge, to any person
    # obtaining a copy of this software and associated documentation files
    # (the "Software"), to deal in the Software without restriction,
    # including without limitation the rights to use, copy, modify, merge,
    # publish, distribute, sublicense, and/or sell copies of the Software,
    # and to permit persons to whom the Software is furnished to do so,
    # subject to the following conditions:
    #
    #     The above copyright notice and this permission notice shall be
    #     included in all copies or substantial portions of the Software.
    #
    #     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    #     EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
    #     OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    #     NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
    #     HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
    #     WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
    #     FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
    #     OTHER DEALINGS IN THE SOFTWARE.
    from UserDict import DictMixin

    class OrderedDict(dict, DictMixin):

        def __init__(self, *args, **kwds):
            if len(args) > 1:
                raise TypeError('expected at most 1 arguments, got %d' % len(args))
            try:
                self.__end
            except AttributeError:
                self.clear()
            self.update(*args, **kwds)

        def clear(self):
            self.__end = end = []
            end += [None, end, end]  # sentinel node for doubly linked list
            self.__map = {}  # key --> [key, prev, next]
            dict.clear(self)

        def __setitem__(self, key, value):
            if key not in self:
                # Link the new key at the tail of the order list.
                end = self.__end
                curr = end[1]
                curr[2] = end[1] = self.__map[key] = [key, curr, end]
            dict.__setitem__(self, key, value)

        def __delitem__(self, key):
            dict.__delitem__(self, key)
            key, prev, next = self.__map.pop(key)
            prev[2] = next
            next[1] = prev

        def __iter__(self):
            end = self.__end
            curr = end[2]
            while curr is not end:
                yield curr[0]
                curr = curr[2]

        def __reversed__(self):
            end = self.__end
            curr = end[1]
            while curr is not end:
                yield curr[0]
                curr = curr[1]

        def popitem(self, last=True):
            if not self:
                raise KeyError('dictionary is empty')
            if last:
                key = reversed(self).next()
            else:
                key = iter(self).next()
            value = self.pop(key)
            return key, value

        def __reduce__(self):
            # Pickle support: rebuild from the ordered (key, value) pairs.
            items = [[k, self[k]] for k in self]
            tmp = self.__map, self.__end
            del self.__map, self.__end
            inst_dict = vars(self).copy()
            self.__map, self.__end = tmp
            if inst_dict:
                return (self.__class__, (items,), inst_dict)
            return self.__class__, (items,)

        def keys(self):
            return list(self)

        setdefault = DictMixin.setdefault
        update = DictMixin.update
        pop = DictMixin.pop
        values = DictMixin.values
        items = DictMixin.items
        iterkeys = DictMixin.iterkeys
        itervalues = DictMixin.itervalues
        iteritems = DictMixin.iteritems

        def __repr__(self):
            if not self:
                return '%s()' % (self.__class__.__name__,)
            return '%s(%r)' % (self.__class__.__name__, self.items())

        def copy(self):
            return self.__class__(self)

        @classmethod
        def fromkeys(cls, iterable, value=None):
            d = cls()
            for key in iterable:
                d[key] = value
            return d

        def __eq__(self, other):
            # Order matters when comparing against another OrderedDict.
            if isinstance(other, OrderedDict):
                if len(self) != len(other):
                    return False
                for p, q in zip(self.items(), other.items()):
                    if p != q:
                        return False
                return True
            return dict.__eq__(self, other)

        def __ne__(self, other):
            return not self == other
else:
    from collections import OrderedDict
# functools.lru_cache exists on every supported interpreter (Python >= 3.2);
# the bundled functools32 backport (and its dummy_threading fallback) is
# dead code now that Python 2 support has been dropped.
from functools import lru_cache
# collections.MutableSet was a deprecated alias removed in Python 3.10;
# collections.abc is the supported location.
from collections.abc import MutableSet


class OrderedSet(MutableSet):
    """A set that remembers the order in which elements were added.

    Follows Raymond Hettinger's OrderedSet recipe: a circular doubly
    linked list anchored at the ``self.end`` sentinel preserves insertion
    order, while ``self.map`` maps each key to its list node for O(1)
    membership, addition and removal.
    """

    def __init__(self, iterable=None):
        # Sentinel node of the circular list; nodes are [key, prev, next].
        self.end = end = []
        end += [None, end, end]
        self.map = {}  # key -> node in the linked list
        if iterable is not None:
            self |= iterable  # MutableSet.__ior__ -> self.add per element

    def __len__(self):
        return len(self.map)

    def __contains__(self, key):
        return key in self.map

    def add(self, key):
        """Append *key* at the end if it is not already present."""
        if key not in self.map:
            end = self.end
            last = end[1]
            last[2] = end[1] = self.map[key] = [key, last, end]

    def discard(self, key):
        """Remove *key* if present by unlinking its node."""
        if key in self.map:
            _, prev_node, next_node = self.map.pop(key)
            prev_node[2] = next_node
            next_node[1] = prev_node

    def __iter__(self):
        end = self.end
        node = end[2]
        while node is not end:
            yield node[0]
            node = node[2]

    def __reversed__(self):
        end = self.end
        node = end[1]
        while node is not end:
            yield node[0]
            node = node[1]

    def pop(self, last=True):
        """Remove and return the last (or first) element.

        Raises KeyError when the set is empty.
        """
        if not self:
            raise KeyError('set is empty')
        key = self.end[1][0] if last else self.end[2][0]
        self.discard(key)
        return key

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, list(self))

    def __eq__(self, other):
        # Order is significant only when comparing two OrderedSets.
        if isinstance(other, OrderedSet):
            return len(self) == len(other) and list(self) == list(other)
        return set(self) == set(other)

    def update(self, other):
        """Add every element of *other*, preserving encounter order."""
        for item in other:
            self.add(item)

View file

@ -1,29 +0,0 @@
import os
import sys
from pylama.lint import Linter as BaseLinter
from .isort import SortImports
class Linter(BaseLinter):
    """pylama linter plugin that checks import ordering with isort."""

    def allow(self, path):
        """Determine if this path should be linted."""
        return path.endswith('.py')

    def run(self, path, **meta):
        """Lint the file. Return an array of error dicts if appropriate."""
        original_stdout = sys.stdout
        with open(os.devnull, 'w') as devnull:
            # Suppress isort's console output while checking.
            sys.stdout = devnull
            try:
                incorrectly_sorted = SortImports(path, check=True).incorrectly_sorted
            finally:
                # BUG FIX: the previous version never restored sys.stdout,
                # leaving it bound to a *closed* devnull handle after the
                # first run() and silencing (or breaking) all later prints.
                sys.stdout = original_stdout
        if incorrectly_sorted:
            return [{
                'lnum': 0,
                'col': 0,
                'text': 'Incorrectly sorted imports.',
                'type': 'ISORT'
            }]
        return []

View file

@ -1,256 +0,0 @@
"""isort/settings.py.
Defines how the default settings for isort should be loaded
(First from the default setting dictionary at the top of the file, then overridden by any settings
in ~/.isort.cfg if there are any)
Copyright (C) 2013 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import fnmatch
import os
import posixpath
from collections import namedtuple
from .pie_slice import itemsview, lru_cache, native_str
try:
import configparser
except ImportError:
import ConfigParser as configparser
MAX_CONFIG_SEARCH_DEPTH = 25 # The number of parent directories isort will look for a config file within
DEFAULT_SECTIONS = ('FUTURE', 'STDLIB', 'THIRDPARTY', 'FIRSTPARTY', 'LOCALFOLDER')
# Poor-man's enum: WrapModes is first bound to the tuple of mode names, then
# immediately rebound to a namedtuple instance whose fields are those names and
# whose values are their indices (WrapModes.GRID == 0 ... WrapModes.NOQA == 6).
WrapModes = ('GRID', 'VERTICAL', 'HANGING_INDENT', 'VERTICAL_HANGING_INDENT', 'VERTICAL_GRID', 'VERTICAL_GRID_GROUPED', 'NOQA')
WrapModes = namedtuple('WrapModes', WrapModes)(*range(len(WrapModes)))
# Note that none of these lists must be complete as they are simply fallbacks for when included auto-detection fails.
# Canonical default value for every supported isort option; from_path() starts
# from a copy of this dict and overlays any discovered configuration files.
default = {'force_to_top': [],
           'skip': ['__init__.py', ],
           'skip_glob': [],
           'line_length': 79,
           'wrap_length': 0,
           'sections': DEFAULT_SECTIONS,
           'no_sections': False,
           'known_future_library': ['__future__'],
           'known_standard_library': ['AL', 'BaseHTTPServer', 'Bastion', 'CGIHTTPServer', 'Carbon', 'ColorPicker',
                                      'ConfigParser', 'Cookie', 'DEVICE', 'DocXMLRPCServer', 'EasyDialogs', 'FL',
                                      'FrameWork', 'GL', 'HTMLParser', 'MacOS', 'MimeWriter', 'MiniAEFrame', 'Nav',
                                      'PixMapWrapper', 'Queue', 'SUNAUDIODEV', 'ScrolledText', 'SimpleHTTPServer',
                                      'SimpleXMLRPCServer', 'SocketServer', 'StringIO', 'Tix', 'Tkinter', 'UserDict',
                                      'UserList', 'UserString', 'W', '__builtin__', 'abc', 'aepack', 'aetools',
                                      'aetypes', 'aifc', 'al', 'anydbm', 'applesingle', 'argparse', 'array', 'ast',
                                      'asynchat', 'asyncio', 'asyncore', 'atexit', 'audioop', 'autoGIL', 'base64',
                                      'bdb', 'binascii', 'binhex', 'bisect', 'bsddb', 'buildtools', 'builtins',
                                      'bz2', 'cPickle', 'cProfile', 'cStringIO', 'calendar', 'cd', 'cfmfile', 'cgi',
                                      'cgitb', 'chunk', 'cmath', 'cmd', 'code', 'codecs', 'codeop', 'collections',
                                      'colorsys', 'commands', 'compileall', 'compiler', 'concurrent', 'configparser',
                                      'contextlib', 'cookielib', 'copy', 'copy_reg', 'copyreg', 'crypt', 'csv',
                                      'ctypes', 'curses', 'datetime', 'dbhash', 'dbm', 'decimal', 'difflib',
                                      'dircache', 'dis', 'distutils', 'dl', 'doctest', 'dumbdbm', 'dummy_thread',
                                      'dummy_threading', 'email', 'encodings', 'ensurepip', 'enum', 'errno',
                                      'exceptions', 'faulthandler', 'fcntl', 'filecmp', 'fileinput', 'findertools',
                                      'fl', 'flp', 'fm', 'fnmatch', 'formatter', 'fpectl', 'fpformat', 'fractions',
                                      'ftplib', 'functools', 'future_builtins', 'gc', 'gdbm', 'gensuitemodule',
                                      'getopt', 'getpass', 'gettext', 'gl', 'glob', 'grp', 'gzip', 'hashlib',
                                      'heapq', 'hmac', 'hotshot', 'html', 'htmlentitydefs', 'htmllib', 'http',
                                      'httplib', 'ic', 'icopen', 'imageop', 'imaplib', 'imgfile', 'imghdr', 'imp',
                                      'importlib', 'imputil', 'inspect', 'io', 'ipaddress', 'itertools', 'jpeg',
                                      'json', 'keyword', 'lib2to3', 'linecache', 'locale', 'logging', 'lzma',
                                      'macerrors', 'macostools', 'macpath', 'macresource', 'mailbox', 'mailcap',
                                      'marshal', 'math', 'md5', 'mhlib', 'mimetools', 'mimetypes', 'mimify', 'mmap',
                                      'modulefinder', 'msilib', 'msvcrt', 'multifile', 'multiprocessing', 'mutex',
                                      'netrc', 'new', 'nis', 'nntplib', 'numbers', 'operator', 'optparse', 'os',
                                      'ossaudiodev', 'parser', 'pathlib', 'pdb', 'pickle', 'pickletools', 'pipes',
                                      'pkgutil', 'platform', 'plistlib', 'popen2', 'poplib', 'posix', 'posixfile',
                                      'pprint', 'profile', 'pstats', 'pty', 'pwd', 'py_compile', 'pyclbr', 'pydoc',
                                      'queue', 'quopri', 'random', 're', 'readline', 'reprlib', 'resource', 'rexec',
                                      'rfc822', 'rlcompleter', 'robotparser', 'runpy', 'sched', 'secrets', 'select',
                                      'selectors', 'sets', 'sgmllib', 'sha', 'shelve', 'shlex', 'shutil', 'signal',
                                      'site', 'sitecustomize', 'smtpd', 'smtplib', 'sndhdr', 'socket', 'socketserver',
                                      'spwd', 'sqlite3', 'ssl', 'stat', 'statistics', 'statvfs', 'string', 'stringprep',
                                      'struct', 'subprocess', 'sunau', 'sunaudiodev', 'symbol', 'symtable', 'sys',
                                      'sysconfig', 'syslog', 'tabnanny', 'tarfile', 'telnetlib', 'tempfile', 'termios',
                                      'test', 'textwrap', 'this', 'thread', 'threading', 'time', 'timeit', 'tkinter',
                                      'token', 'tokenize', 'trace', 'traceback', 'tracemalloc', 'ttk', 'tty', 'turtle',
                                      'turtledemo', 'types', 'typing', 'unicodedata', 'unittest', 'urllib', 'urllib2',
                                      'urlparse', 'user', 'usercustomize', 'uu', 'uuid', 'venv', 'videoreader',
                                      'warnings', 'wave', 'weakref', 'webbrowser', 'whichdb', 'winreg', 'winsound',
                                      'wsgiref', 'xdrlib', 'xml', 'xmlrpc', 'xmlrpclib', 'zipapp', 'zipfile',
                                      'zipimport', 'zlib'],
           'known_third_party': ['google.appengine.api'],
           'known_first_party': [],
           'multi_line_output': WrapModes.GRID,
           'forced_separate': [],
           'indent': ' ' * 4,
           'length_sort': False,
           'add_imports': [],
           'remove_imports': [],
           'force_single_line': False,
           'default_section': 'FIRSTPARTY',
           'import_heading_future': '',
           'import_heading_stdlib': '',
           'import_heading_thirdparty': '',
           'import_heading_firstparty': '',
           'import_heading_localfolder': '',
           'balanced_wrapping': False,
           'use_parentheses': False,
           'order_by_type': True,
           'atomic': False,
           'lines_after_imports': -1,
           'lines_between_sections': 1,
           'lines_between_types': 0,
           'combine_as_imports': False,
           'combine_star': False,
           'include_trailing_comma': False,
           'from_first': False,
           'verbose': False,
           'quiet': False,
           'force_adds': False,
           'force_alphabetical_sort_within_sections': False,
           'force_alphabetical_sort': False,
           'force_grid_wrap': 0,
           'force_sort_within_sections': False,
           'show_diff': False,
           'ignore_whitespace': False}
@lru_cache()
def from_path(path):
    """Compute the effective isort settings for *path*.

    Starts from a copy of the built-in defaults and overlays every
    recognised configuration source, lowest to highest precedence.
    NOTE(review): lru_cache means repeated calls share one dict object;
    callers must not mutate the result.
    """
    settings = default.copy()
    for config_name, user_fallback, config_sections in (
            ('.editorconfig', '~/.editorconfig', ('*', '*.py', '**.py')),
            ('.isort.cfg', '~/.isort.cfg', ('settings', 'isort')),
            ('setup.cfg', None, ('isort', )),
            ('tox.ini', None, ('isort', ))):
        _update_settings_with_config(path, config_name, user_fallback, config_sections, settings)
    return settings
def _update_settings_with_config(path, name, default, sections, computed_settings):
    """Find config file *name* and merge its *sections* into *computed_settings*.

    Walks upward from *path* toward the filesystem root, checking at most
    MAX_CONFIG_SEARCH_DEPTH directories for *name*; when the walk finds
    nothing, falls back to the user-level *default* location (if given).

    NOTE(review): the *default* parameter shadows the module-level ``default``
    settings dict; renaming it would change the call signature, so it stays.
    """
    config_file = default and os.path.expanduser(default)
    directory = path
    for _ in range(MAX_CONFIG_SEARCH_DEPTH):
        if not directory:
            break
        candidate = os.path.join(directory, native_str(name))
        if os.path.exists(candidate):
            config_file = candidate
            break
        parent = os.path.split(directory)[0]
        if parent == directory:  # reached the filesystem root
            break
        directory = parent
    if config_file and os.path.exists(config_file):
        _update_with_config_file(config_file, sections, computed_settings)
def _update_with_config_file(file_path, sections, computed_settings):
    """Merge options read from *file_path* into *computed_settings* in place.

    .editorconfig files get special pre-processing: their generic
    indent/line-length keys are translated into isort's own settings
    before the generic coercion loop runs.
    """
    settings = _get_config_data(file_path, sections).copy()
    if not settings:
        return
    if file_path.endswith('.editorconfig'):
        indent_style = settings.pop('indent_style', '').strip()
        indent_size = settings.pop('indent_size', '').strip()
        if indent_style == 'space':
            # indent_size may be empty -> fall back to 4 spaces (or 1 tab).
            computed_settings['indent'] = ' ' * (indent_size and int(indent_size) or 4)
        elif indent_style == 'tab':
            computed_settings['indent'] = '\t' * (indent_size and int(indent_size) or 1)
        max_line_length = settings.pop('max_line_length', '').strip()
        if max_line_length:
            computed_settings['line_length'] = float('inf') if max_line_length == 'off' else int(max_line_length)
    # itemsview comes from .pie_slice (dict.items() on Python 3).
    for key, value in itemsview(settings):
        # NOTE(review): replace() strips 'not_' anywhere in the key, not only
        # as a prefix -- presumably no real key contains it elsewhere.
        access_key = key.replace('not_', '').lower()
        # Coerce the raw string using the type of the corresponding default.
        existing_value_type = type(default.get(access_key, ''))
        if existing_value_type in (list, tuple):
            # sections has fixed order values; no adding or substraction from any set
            if access_key == 'sections':
                computed_settings[access_key] = tuple(_as_list(value))
            else:
                existing_data = set(computed_settings.get(access_key, default.get(access_key)))
                if key.startswith('not_'):
                    computed_settings[access_key] = list(existing_data.difference(_as_list(value)))
                else:
                    computed_settings[access_key] = list(existing_data.union(_as_list(value)))
        elif existing_value_type == bool and value.lower().strip() == 'false':
            # Only 'false' needs explicit handling; any other non-empty string
            # becomes True via bool(value) in the final else branch.
            computed_settings[access_key] = False
        elif key.startswith('known_'):
            computed_settings[access_key] = list(_as_list(value))
        elif key == 'force_grid_wrap':
            try:
                result = existing_value_type(value)
            except ValueError:
                # backwards compat
                result = default.get(access_key) if value.lower().strip() == 'false' else 2
            computed_settings[access_key] = result
        else:
            computed_settings[access_key] = existing_value_type(value)
def _as_list(value):
return filter(bool, [item.strip() for item in value.replace('\n', ',').split(',')])
@lru_cache()
def _get_config_data(file_path, sections):
with open(file_path, 'rU') as config_file:
if file_path.endswith('.editorconfig'):
line = '\n'
last_position = config_file.tell()
while line:
line = config_file.readline()
if '[' in line:
config_file.seek(last_position)
break
last_position = config_file.tell()
config = configparser.SafeConfigParser()
config.readfp(config_file)
settings = dict()
for section in sections:
if config.has_section(section):
settings.update(dict(config.items(section)))
return settings
return {}
def should_skip(filename, config, path='/'):
    """Returns True if the file should be skipped based on the passed in settings."""
    normalized_target = posixpath.abspath(posixpath.join(path, filename))
    # An exact match against any configured skip path wins outright.
    for skip_path in config['skip']:
        if normalized_target == posixpath.abspath(skip_path.replace('\\', '/')):
            return True
    # Any individual path component listed in 'skip' also excludes the file.
    remainder, component = os.path.split(filename)
    while component:
        if component in config['skip']:
            return True
        remainder, component = os.path.split(remainder)
    # Finally, honour shell-style glob patterns.
    return any(fnmatch.fnmatch(filename, pattern) for pattern in config['skip_glob'])

File diff suppressed because it is too large Load diff

View file

@ -1,158 +0,0 @@
# Grammar for 2to3. This grammar supports Python 2.x and 3.x.
# Note: Changing the grammar specified in this file will most likely
# require corresponding changes in the parser module
# (../Modules/parsermodule.c). If you can't make the changes to
# that module yourself, please co-ordinate the required changes
# with someone who can; ask around on python-dev for help. Fred
# Drake <fdrake@acm.org> will probably be listening there.
# NOTE WELL: You should also follow all the steps listed in PEP 306,
# "How to Change Python's Grammar"
# Commands for Kees Blom's railroad program
#diagram:token NAME
#diagram:token NUMBER
#diagram:token STRING
#diagram:token NEWLINE
#diagram:token ENDMARKER
#diagram:token INDENT
#diagram:output\input python.bla
#diagram:token DEDENT
#diagram:output\textwidth 20.04cm\oddsidemargin 0.0cm\evensidemargin 0.0cm
#diagram:rules
# Start symbols for the grammar:
# file_input is a module or sequence of commands read from an input file;
# single_input is a single interactive statement;
# eval_input is the input for the eval() and input() functions.
# NB: compound_stmt in single_input is followed by extra NEWLINE!
file_input: (NEWLINE | stmt)* ENDMARKER
single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
eval_input: testlist NEWLINE* ENDMARKER
decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
decorators: decorator+
decorated: decorators (classdef | funcdef)
funcdef: 'def' NAME parameters ['->' test] ':' suite
parameters: '(' [typedargslist] ')'
typedargslist: ((tfpdef ['=' test] ',')*
('*' [tname] (',' tname ['=' test])* [',' '**' tname] | '**' tname)
| tfpdef ['=' test] (',' tfpdef ['=' test])* [','])
tname: NAME [':' test]
tfpdef: tname | '(' tfplist ')'
tfplist: tfpdef (',' tfpdef)* [',']
varargslist: ((vfpdef ['=' test] ',')*
('*' [vname] (',' vname ['=' test])* [',' '**' vname] | '**' vname)
| vfpdef ['=' test] (',' vfpdef ['=' test])* [','])
vname: NAME
vfpdef: vname | '(' vfplist ')'
vfplist: vfpdef (',' vfpdef)* [',']
stmt: simple_stmt | compound_stmt
simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
small_stmt: (expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
import_stmt | global_stmt | exec_stmt | assert_stmt)
expr_stmt: testlist_star_expr (augassign (yield_expr|testlist) |
('=' (yield_expr|testlist_star_expr))*)
testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [',']
augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
'<<=' | '>>=' | '**=' | '//=')
# For normal assignments, additional restrictions enforced by the interpreter
print_stmt: 'print' ( [ test (',' test)* [','] ] |
'>>' test [ (',' test)+ [','] ] )
del_stmt: 'del' exprlist
pass_stmt: 'pass'
flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt
break_stmt: 'break'
continue_stmt: 'continue'
return_stmt: 'return' [testlist]
yield_stmt: yield_expr
raise_stmt: 'raise' [test ['from' test | ',' test [',' test]]]
import_stmt: import_name | import_from
import_name: 'import' dotted_as_names
import_from: ('from' ('.'* dotted_name | '.'+)
'import' ('*' | '(' import_as_names ')' | import_as_names))
import_as_name: NAME ['as' NAME]
dotted_as_name: dotted_name ['as' NAME]
import_as_names: import_as_name (',' import_as_name)* [',']
dotted_as_names: dotted_as_name (',' dotted_as_name)*
dotted_name: NAME ('.' NAME)*
global_stmt: ('global' | 'nonlocal') NAME (',' NAME)*
exec_stmt: 'exec' expr ['in' test [',' test]]
assert_stmt: 'assert' test [',' test]
compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated
if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
while_stmt: 'while' test ':' suite ['else' ':' suite]
for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]
try_stmt: ('try' ':' suite
((except_clause ':' suite)+
['else' ':' suite]
['finally' ':' suite] |
'finally' ':' suite))
with_stmt: 'with' with_item (',' with_item)* ':' suite
with_item: test ['as' expr]
with_var: 'as' expr
# NB compile.c makes sure that the default except clause is last
except_clause: 'except' [test [(',' | 'as') test]]
suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT
# Backward compatibility cruft to support:
# [ x for x in lambda: True, lambda: False if x() ]
# even while also allowing:
# lambda x: 5 if x else 2
# (But not a mix of the two)
testlist_safe: old_test [(',' old_test)+ [',']]
old_test: or_test | old_lambdef
old_lambdef: 'lambda' [varargslist] ':' old_test
test: or_test ['if' or_test 'else' test] | lambdef
or_test: and_test ('or' and_test)*
and_test: not_test ('and' not_test)*
not_test: 'not' not_test | comparison
comparison: expr (comp_op expr)*
comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
star_expr: '*' expr
expr: xor_expr ('|' xor_expr)*
xor_expr: and_expr ('^' and_expr)*
and_expr: shift_expr ('&' shift_expr)*
shift_expr: arith_expr (('<<'|'>>') arith_expr)*
arith_expr: term (('+'|'-') term)*
term: factor (('*'|'/'|'%'|'//') factor)*
factor: ('+'|'-'|'~') factor | power
power: atom trailer* ['**' factor]
atom: ('(' [yield_expr|testlist_gexp] ')' |
'[' [listmaker] ']' |
'{' [dictsetmaker] '}' |
'`' testlist1 '`' |
NAME | NUMBER | STRING+ | '.' '.' '.')
listmaker: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
testlist_gexp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
lambdef: 'lambda' [varargslist] ':' test
trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
subscriptlist: subscript (',' subscript)* [',']
subscript: test | [test] ':' [test] [sliceop]
sliceop: ':' [test]
exprlist: (expr|star_expr) (',' (expr|star_expr))* [',']
testlist: test (',' test)* [',']
dictsetmaker: ( (test ':' test (comp_for | (',' test ':' test)* [','])) |
(test (comp_for | (',' test)* [','])) )
classdef: 'class' NAME ['(' [arglist] ')'] ':' suite
arglist: (argument ',')* (argument [',']
|'*' test (',' argument)* [',' '**' test]
|'**' test)
argument: test [comp_for] | test '=' test # Really [keyword '='] test
comp_iter: comp_for | comp_if
comp_for: 'for' exprlist 'in' testlist_safe [comp_iter]
comp_if: 'if' old_test [comp_iter]
testlist1: test (',' test)*
# not used in grammar, but may appear in "node" passed from Parser to Compiler
encoding_decl: NAME
yield_expr: 'yield' [testlist]

Some files were not shown because too many files have changed in this diff Show more